# Core
library(tidyverse)
## ── Attaching packages ─────────────────────────────────────── tidyverse 1.3.1 ──
## ✓ ggplot2 3.3.5 ✓ purrr 0.3.4
## ✓ tibble 3.1.6 ✓ dplyr 1.0.7
## ✓ tidyr 1.1.4 ✓ stringr 1.4.0
## ✓ readr 2.1.1 ✓ forcats 0.5.1
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## x dplyr::filter() masks stats::filter()
## x dplyr::lag() masks stats::lag()
library(grid)
library(gridExtra)
##
## Attaching package: 'gridExtra'
## The following object is masked from 'package:dplyr':
##
## combine
# Load data
library(readxl)
# Time Series
library(timetk)
## Registered S3 method overwritten by 'tune':
## method from
## required_pkgs.model_spec parsnip
library(forecast)
## Registered S3 method overwritten by 'quantmod':
## method from
## as.zoo.data.frame zoo
library(TSA)
## Registered S3 methods overwritten by 'TSA':
## method from
## fitted.Arima forecast
## plot.Arima forecast
##
## Attaching package: 'TSA'
## The following object is masked from 'package:readr':
##
## spec
## The following objects are masked from 'package:stats':
##
## acf, arima
## The following object is masked from 'package:utils':
##
## tar
library(NTS)
library(MSwM)
## Loading required package: parallel
library(tsDyn)
library(fNonlinear)
## Loading required package: timeDate
##
## Attaching package: 'timeDate'
## The following objects are masked from 'package:TSA':
##
## kurtosis, skewness
## Loading required package: timeSeries
## Loading required package: fBasics
library(dlm)
##
## Attaching package: 'dlm'
## The following object is masked from 'package:ggplot2':
##
## %+%
library(astsa)
##
## Attaching package: 'astsa'
## The following object is masked from 'package:fBasics':
##
## nyse
## The following object is masked from 'package:forecast':
##
## gas
library(seasonal)
##
## Attaching package: 'seasonal'
## The following object is masked from 'package:astsa':
##
## unemp
## The following objects are masked from 'package:timeSeries':
##
## outlier, series
## The following object is masked from 'package:tibble':
##
## view
library(tseries)
library(astsa)
library(tsoutliers)
library(urca)
# Machine Learning
library(tidymodels)
## ── Attaching packages ────────────────────────────────────── tidymodels 0.1.4 ──
## ✓ broom 0.7.11 ✓ rsample 0.1.1
## ✓ dials 0.0.10 ✓ tune 0.1.6
## ✓ infer 1.0.0 ✓ workflows 0.2.4
## ✓ modeldata 0.1.1 ✓ workflowsets 0.1.0
## ✓ parsnip 0.1.7 ✓ yardstick 0.0.9
## ✓ recipes 0.1.17
## Warning: package 'broom' was built under R version 4.1.2
## ── Conflicts ───────────────────────────────────────── tidymodels_conflicts() ──
## x dlm::%+%() masks ggplot2::%+%()
## x yardstick::accuracy() masks forecast::accuracy()
## x gridExtra::combine() masks dplyr::combine()
## x scales::discard() masks purrr::discard()
## x timeSeries::filter() masks dplyr::filter(), stats::filter()
## x recipes::fixed() masks stringr::fixed()
## x timeSeries::lag() masks dplyr::lag(), stats::lag()
## x yardstick::spec() masks TSA::spec(), readr::spec()
## x recipes::step() masks stats::step()
## x seasonal::view() masks tibble::view()
## • Use tidymodels_prefer() to resolve common conflicts.
library(modeltime)
##
## Attaching package: 'modeltime'
## The following object is masked from 'package:seasonal':
##
## trend
## The following object is masked from 'package:TSA':
##
## season
library(modeltime.ensemble)
## Loading required package: modeltime.resample
library(modeltime.resample)
library(timetk)
# Compute forecast-accuracy metrics for predictions against observed
# values.
#
# pred: numeric vector (or ts) of predicted values.
# val:  numeric vector (or ts) of observed values; assumed to have the
#       same length as pred.
#
# Returns a one-row data.frame with columns:
#   MAE  - mean absolute error
#   RSS  - residual sum of squares
#   MSE  - mean squared error (RSS / n)
#   RMSE - root mean squared error
getPerformance <- function(pred, val) {
  res <- pred - val
  MAE <- sum(abs(res)) / length(val)
  RSS <- sum(res^2)
  MSE <- RSS / length(val)
  RMSE <- sqrt(MSE)
  data.frame(MAE, RSS, MSE, RMSE)
}
#getPerformance(pred, val)
El presente apartado está dividido en dos secciones, las cuales muestran los resultados obtenidos que buscan respaldar el objetivo planteado. En la primera sección se compara y selecciona el mejor modelo de pronóstico de serie de tiempo según el tipo de modelo: lineales, no lineales y de minería de datos, para posteriormente realizar un ensamble con los mejores tres métodos. La segunda sección presenta una prueba de tensión en la cual se plantearán diferentes escenarios para estimar el potencial impacto de una caída abrupta de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dólares en Costa Rica para diciembre 2021.
En la figura @ref(fig:evolucionserie) se muestran los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dolares para febrero-2001 a julio-2021 y de la cual se genera el análisis para identificar las siguientes características: tendencias o ciclos, existencia de estabilidad en las observaciones, variancia de las observaciones (constante o variable en el tiempo), existencia de observaciones inusuales y de puntos extremos, cambios en la estructura de la serie, entre otras.
# Read the monthly net-asset balances of the money-market investment
# funds from the Excel source and tidy the column names.
SeriesDatos <- read_excel("~/Google Drive/Mi unidad/1.Maestria II Ciclo 2021/Curso de Analisis De Casos/Caso II/Datos/Base Datos.xlsx")%>%
janitor::clean_names()%>%
# Append a day so the year-month label becomes a full date string
# (e.g. "2001-02" -> "2001-02-01").
mutate(ActivoNeto=paste0(activo_neto,"-01"))%>%
rename('ActNetCRC'=crc,
'ActNetUSD'=usd)
# Monthly time series, February 2001 to July 2021 (frequency 12).
# Assumes column 2 = colones and column 3 = dollars — confirm against
# the Excel layout.
actnetcrc<- ts(SeriesDatos[,2],start =c(2001,2),end=c(2021,7), frequency = 12)
actnetusd<- ts(SeriesDatos[,3],start =c(2001,2),end=c(2021,7), frequency = 12)
actnet <- cbind(actnetcrc,actnetusd)
# X-13ARIMA-SEATS decomposition (seasonal::seas) of each series; used
# below to overlay the trend-cycle on the raw data.
fitcrc<-actnetcrc %>%
seas()
fitusd<- actnetusd %>%
seas()
# Faceted plot of both raw series.
pseries<-autoplot(actnet,facets=TRUE) +
xlab("Mes") +
ylab("Millones")+
theme_bw()
# CRC series with its trend-cycle; dashed vertical lines mark the
# May-2015 / Nov-2016 window discussed in the text.
ptendseriecr<-autoplot(actnetcrc, series="Data") +
autolayer(trendcycle(fitcrc), series="Tendencia") +
#autolayer(seasadj(fitcrc), series="Ajustada Estacionalmente") +
xlab("Mes") + ylab("Millones") +
scale_colour_manual(values=c("grey70","red","royalblue4"),
breaks=c("Data","Ajustada Estacionalmente","Tendencia"))+
theme_bw()+
ggtitle("Colones")+
geom_vline(xintercept = 2015 + (06 - 1) / 12,linetype = "dashed", colour ='gray' )+
geom_vline(xintercept = 2016 + (11 - 1) / 12,linetype = "dashed", colour ='gray' )+
scale_y_continuous(breaks = seq(0,1200000,200000))
# Same plot for the USD series.
# NOTE(review): the second dashed line uses month 12 here but month 11
# in the CRC plot, while the text refers to octubre 2016 — confirm the
# intended break month.
ptendserieusd<-autoplot(actnetusd, series="Data") +
autolayer(trendcycle(fitusd), series="Tendencia") +
#autolayer(seasadj(fitusd), series="Ajustada Estacionalmente") +
xlab("Mes") + ylab("Saldos") +
ggtitle("Dolares") +
scale_colour_manual(values=c("grey70","red","royalblue4"),
breaks=c("Data","Ajustada Estacionalmente","Tendencia"))+
theme_bw()+
geom_vline(xintercept = 2015 + (06 - 1) / 12,linetype = "dashed", colour ='gray' )+
geom_vline(xintercept = 2016 + (12 - 1) / 12,linetype = "dashed", colour ='gray' )+
scale_y_continuous(breaks = seq(0,2000,250))
grid.arrange(ptendseriecr, ptendserieusd, ncol = 1)
Costa Rica:Evolución de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dolares, febrero-2001 a julio-2021
#https://www.sepg.pap.hacienda.gob.es/sitios/sepg/es-ES/Presupuestos/DocumentacionEstadisticas/Documentacion/Documents/DOCUMENTOS%20DE%20TRABAJO/D95006.pdf
# Automatic outlier detection on the CRC series with tsoutliers::tso,
# searching for seasonal level shifts (SLS), additive outliers (AO),
# level shifts (LS), temporary changes (TC) and innovational
# outliers (IO). The warnings below indicate the iterative search hit
# its iteration limits.
otlier_crc<- tso(y = actnetcrc,types=c("SLS","AO","LS","TC","IO"))
## Warning in locate.outliers.iloop(resid = resid, pars = pars, cval = cval, :
## stopped when 'maxit.iloop' was reached
## Warning in locate.outliers.iloop(resid = resid, pars = pars, cval = cval, :
## stopped when 'maxit.iloop' was reached
## Warning in locate.outliers.iloop(resid = resid, pars = pars, cval = cval, :
## stopped when 'maxit.iloop' was reached
## Warning in locate.outliers.oloop(y = y, fit = fit, types = types, cval = cval, :
## stopped when 'maxit.oloop = 4' was reached
plot(otlier_crc)
# The equivalent USD run is left commented out in the source.
# otlier_usd<- tso(y = actnetusd,types=c("SLS","AO","LS","TC","IO"))
# otlier_usd
A partir del análisis de la serie se identificaron las siguientes características:
Para ambas series del activo neto , colones y dolares, se observa una tendencia creciente desde febrero 2001, así como un aumento de la variabilidad conforme aumenta los meses.
Para el periodo de mayo 2015 a octubre 2016 (líneas punteadas grises) hay un cambio de nivel (valor extremo LS[^4]) en el volumen mensual del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero tanto en colones como en dólares; sin embargo, es inverso en ambas series, lo cual sugiere que posiblemente los participantes trasladaron sus inversiones de dólares a colones. Esto se explica por:
La adopción del régimen de tipo de cambio de flotación administrada por parte del Banco Central de Costa Rica (BCCR) y el incremento en el superávit de divisas del sector privado incidió en la apreciación del colón (disminución del tipo de cambio) [@bccr5].
La reducción de la tasa de interés de política monetaria por parte del BCCR en 300 puntos base en el 2015, con el objetivo de estimular la economía, promoviendo el crecimiento en el crédito nacional y para reducir el costo de la deuda para el gobierno [@mv1; @mv2].
En el último trimestre del 2015, la industria tuvo una contracción de la liquidez en dolares, explicado por la salida de los participantes hacia el mercado internacional [@mv2].
Para el activo neto en colones se observa que en abril 2020 el activo neto en colones creció en 19.5 por ciento respecto al mismo periodo del año pasado, este comportamiento creciente y acelerado se mantuvo hasta diciembre de ese mismo año. Lo cual coincide con el efecto de la crisis sanitaria por COVID-19 que inicio en Costa Rica en marzo 2020, esta fecha es identificada como un valor extremo de cambio temporal [^4]. Esta situación sanitaria provocó un aumento de la incertidumbre en la economía mundial incidiendo en que los agentes económicos buscaran refugiarse en activos líquidos [@bccr1]. Un comportamiento similar ocurre para el activo neto en dolares.
Respecto a la estacionalidad de las series, se observa en la figura @ref(fig:estacionalidad) que para el caso de colones los saldos del activo neto tienden a ser mayores en enero y octubre, y presentar valores relativamente bajos al finalizar el año noviembre y diciembre, esto es de esperar debido a la época navideña y que diciembre comúnmente se labora 3 de las 4 semana del mes. Para el caso de dolares se observa que los meses con mayores saldos del activo neto se dan de mayo a agosto, y al igual que el caso de colones, se observa que los dos últimos meses del año los mismos se reduce.
# Monthly subseries plots of the seasonal component extracted by
# seas() for each series, to visualize the seasonal index by month.
pestacioncr <- fitcrc %>%
seasonal() %>%
ggsubseriesplot() +
ylab("Estacionalidad")+
theme_bw()+
ggtitle("Colones")
pestacionusd <- fitusd %>%
seasonal() %>%
ggsubseriesplot() +
ylab("Estacionalidad")+
theme_bw()+
ggtitle("Dolares")
grid.arrange(pestacioncr, pestacionusd, nrow = 2,ncol=1)
Costa Rica:Indice Estacional de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dolares, febrero-2001 a julio-2021
Por otro lado, respecto al componente irregular para la serie en colones (ver en el @ref(anexos), la figura @ref(fig:descomposicionplotanexo)), este se comporta hasta el año 2012 de forma aditiva; en otras palabras, la variancia de la serie no fluctúa con el nivel de la serie. Sin embargo, de 2012 hacia adelante se observa que la variación aumenta con el nivel de la serie, por lo cual se podría argumentar que la serie tiene un comportamiento mixto (aditivo y multiplicativo). En contraparte, para la serie en dólares se observa una variación similar en todo el periodo, la cual no varía con respecto al nivel de la serie.
# Classical decompositions (trend + seasonal + random) of both series
# under additive and multiplicative assumptions, for comparison.
fitcrc_add<-actnetcrc %>%
decompose(type = "additive")
fitcrc_multi<-actnetcrc %>%
decompose(type = "multiplicative")
fitusd_add<- actnetusd %>%
decompose(type = "additive")
fitusd_multi<- actnetusd %>%
decompose(type = "multiplicative")
# Panel plots of the four decompositions, arranged 2x2 below.
pdescompcrcadd <- fitcrc_add%>%
autoplot() +
xlab("Mes")+
ggtitle("Aditiva: Colones") +
theme_bw()
pdescompcrcmult<-fitcrc_multi%>%
autoplot() + xlab("Mes") +
ggtitle("Multiplicativa: Colones")+
theme_bw()
pdescompusdadd <- fitusd_add%>%
autoplot() +
xlab("Mes")+
ggtitle("Aditiva: Dolares") +
theme_bw()
pdescompusdmult<-fitusd_multi%>%
autoplot() + xlab("Mes") +
ggtitle("Multiplicativa: Dolares")+
theme_bw()
descompo<-grid.arrange(pdescompcrcadd,pdescompcrcmult, pdescompusdadd,pdescompusdmult, nrow = 2,ncol=2)
Para confirmar cuál modelo (aditivo o multiplicativo) se ajusta mejor a cada serie, se procedió a evaluar si el componente irregular identificado se ajusta a una distribución normal, para lo cual se realizaron las pruebas de hipótesis de normalidad Shapiro-Wilk y Jarque-Bera, así como una inspección gráfica por medio de gráficos Cuantil-Cuantil (qqplot). En la figura @ref(fig:irregularcrc) se puede identificar que para el caso de la serie en colones el mejor modelo es el multiplicativo, mientras que para la serie en dólares es el aditivo.
# Collect the random (irregular) component of each decomposition into
# one data frame for the normality checks below.
Aleatorio_Desc<-cbind(
Aleatorio_crc_add=fitcrc_add$random,
Aleatorio_crc_multi=fitcrc_multi$random,
Aleatorio_usd_add=fitusd_add$random,
Aleatorio_usd_multi=fitusd_multi$random)%>%
as.data.frame()
# Jarque-Bera normality tests; NAs introduced by the moving-average
# filter of decompose() are dropped first. The trailing notes are the
# author's verdicts ("Cumple" = normality holds) — verify them against
# the p-values printed in the figure subtitles.
jb_res_crc_add<-jarque.bera.test(Aleatorio_Desc$Aleatorio_crc_add[!is.na(Aleatorio_Desc$Aleatorio_crc_add)]) # Holds (author's note)
jb_res_crc_mult<-jarque.bera.test(Aleatorio_Desc$Aleatorio_crc_multi[!is.na(Aleatorio_Desc$Aleatorio_crc_multi)]) # Holds (author's note)
jb_res_usd_add<-jarque.bera.test(Aleatorio_Desc$Aleatorio_usd_add[!is.na(Aleatorio_Desc$Aleatorio_usd_add)]) # Holds (author's note)
jb_res_usd_multi<-jarque.bera.test(Aleatorio_Desc$Aleatorio_usd_multi[!is.na(Aleatorio_Desc$Aleatorio_usd_multi)]) # Holds (author's note)
# Shapiro-Wilk normality tests on the same (NA-free) components.
sw_res_crc_add<-shapiro.test(Aleatorio_Desc$Aleatorio_crc_add[!is.na(Aleatorio_Desc$Aleatorio_crc_add)]) # Holds (author's note)
sw_res_crc_mult<-shapiro.test(Aleatorio_Desc$Aleatorio_crc_multi[!is.na(Aleatorio_Desc$Aleatorio_crc_multi)]) # Holds (author's note)
sw_res_usd_add<-shapiro.test(Aleatorio_Desc$Aleatorio_usd_add[!is.na(Aleatorio_Desc$Aleatorio_usd_add)]) # Holds (author's note)
sw_res_usd_multi<-shapiro.test(Aleatorio_Desc$Aleatorio_usd_multi[!is.na(Aleatorio_Desc$Aleatorio_usd_multi)]) # Holds (author's note)
## QQ-plots of the random component of each decomposition, annotated
## with the Shapiro-Wilk and Jarque-Bera test results in the subtitle.
## Fix: corrected the misspelling "Prubas" -> "Pruebas" in the four
## user-facing plot subtitles.
p1<-ggplot(Aleatorio_Desc, aes(sample = Aleatorio_crc_add))+
stat_qq() +
stat_qq_line()+
ggtitle("Aditiva - Colones") +
labs(subtitle = paste("Pruebas de Normalidad (Estadístico,P-Value):Shapiro-Wilk:",round(sw_res_crc_add$statistic,3),",",round(sw_res_crc_add$p.value,4), "y",
"Jarque-Bera",round(jb_res_crc_add$statistic,3),",",round(jb_res_crc_add$p.value,4)))+
theme_bw()
p2<-ggplot(Aleatorio_Desc, aes(sample = Aleatorio_crc_multi))+
stat_qq() +
stat_qq_line()+
ggtitle("Multiplicativa - Colones")+
labs(subtitle = paste("Pruebas de Normalidad (Estadístico,P-Value):Shapiro Wilk:",round(sw_res_crc_mult$statistic,3),",",round(sw_res_crc_mult$p.value,4), "y",
"Jarque-Bera",round(jb_res_crc_mult$statistic,3),",",round(jb_res_crc_mult$p.value,4)))+
theme_bw()
p3<-ggplot(Aleatorio_Desc, aes(sample = Aleatorio_usd_add))+
stat_qq() +
stat_qq_line()+
ggtitle("Aditiva - Dolares")+
labs(subtitle = paste("Pruebas de Normalidad (Estadístico,P-Value): Shapiro Wilk:",round(sw_res_usd_add$statistic,3),",",round(sw_res_usd_add$p.value,4), "y",
"Jarque-Bera",round(jb_res_usd_add$statistic,3),",",round(jb_res_usd_add$p.value,4)))+
theme_bw()
p4<-ggplot(Aleatorio_Desc, aes(sample = Aleatorio_usd_multi))+
stat_qq() +
stat_qq_line()+
ggtitle("Multiplicativa - Dolares")+
labs(subtitle = paste("Pruebas de Normalidad (Estadístico,P-Value): Shapiro Wilk:",round(sw_res_usd_multi$statistic,3),",",round(sw_res_usd_multi$p.value,4), "y",
"Jarque-Bera",round(jb_res_usd_multi$statistic,3),",",round(jb_res_usd_multi$p.value,4)))+
theme_bw()
grid.arrange(p1,p2,p3,p4,nrow=2, ncol = 2)
## Warning: Removed 12 rows containing non-finite values (stat_qq).
## Warning: Removed 12 rows containing non-finite values (stat_qq_line).
## Warning: Removed 12 rows containing non-finite values (stat_qq).
## Warning: Removed 12 rows containing non-finite values (stat_qq_line).
## Warning: Removed 12 rows containing non-finite values (stat_qq).
## Warning: Removed 12 rows containing non-finite values (stat_qq_line).
## Warning: Removed 12 rows containing non-finite values (stat_qq).
## Warning: Removed 12 rows containing non-finite values (stat_qq_line).
Costa Rica: QQPlot de los residuos de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dolares por tipo de descomposición, febrero-2001 a julio-2021
# Maximum lag used for the ACF/PACF plots: one quarter of the sample.
MaxLag2<-length(actnetcrc)/4
## Constant-mean checks
### Levels
# Augmented Dickey-Fuller test:
# H0: the series is not stationary (unit root)
# H1: the series is stationary
adf_org_crc<-adf.test(actnetcrc,alternative="stationary") # Non-constant mean
adf_org_usd<-adf.test(actnetusd,alternative="stationary") # Non-constant mean
## Zivot & Andrews unit-root test, which allows a break at an unknown
## point in the intercept, the linear trend, or both.
## The test is based on recursive estimation of a test regression; the
## test statistic is the minimum t-statistic of the coefficient of the
## lagged endogenous variable.
## Recall that the null hypothesis is a unit root, while the
## alternative is stationarity.
## Author's note: the test is very sensitive; repeated runs gave
## different results/p-values.
za_org_crc<-ur.za(window(actnetcrc,start=c(2001,2),end=c(2020,2)), model="both")
summary(za_org_crc)
##
## ################################
## # Zivot-Andrews Unit Root Test #
## ################################
##
##
## Call:
## lm(formula = testmat)
##
## Residuals:
## Min 1Q Median 3Q Max
## -112020 -19964 -694 16741 158850
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -17396.37000 6617.37451 -2.629 0.00916 **
## y.l1 0.70943 0.04757 14.912 < 0.0000000000000002 ***
## trend 1094.74967 186.94371 5.856 0.0000000168 ***
## du 42188.76580 13230.73117 3.189 0.00163 **
## dt -980.19378 340.74430 -2.877 0.00441 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 37160 on 223 degrees of freedom
## (1 observation deleted due to missingness)
## Multiple R-squared: 0.9805, Adjusted R-squared: 0.9801
## F-statistic: 2802 on 4 and 223 DF, p-value: < 0.00000000000000022
##
##
## Teststatistic: -6.1077
## Critical values: 0.01= -5.57 0.05= -5.08 0.1= -4.82
##
## Potential break point at position: 171
plot(za_org_crc)
# Map the reported break position (observation 171) back to a date.
time(actnetcrc)[171]
## [1] 2015.25
# Same Zivot-Andrews test for the USD series (window ends March 2020).
za_org_usd<-ur.za(window(actnetusd,start=c(2001,2),end=c(2020,3)), model="both")
summary(za_org_usd)
##
## ################################
## # Zivot-Andrews Unit Root Test #
## ################################
##
##
## Call:
## lm(formula = testmat)
##
## Residuals:
## Min 1Q Median 3Q Max
## -171.049 -28.592 -2.375 22.788 219.846
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -0.48428 8.84633 -0.055 0.95639
## y.l1 0.81735 0.03686 22.173 < 0.0000000000000002 ***
## trend 1.02909 0.22921 4.490 0.0000114 ***
## du 45.46407 16.89895 2.690 0.00768 **
## dt -1.03127 0.34119 -3.023 0.00280 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 53.66 on 224 degrees of freedom
## (1 observation deleted due to missingness)
## Multiple R-squared: 0.9817, Adjusted R-squared: 0.9813
## F-statistic: 3000 on 4 and 224 DF, p-value: < 0.00000000000000022
##
##
## Teststatistic: -4.9551
## Critical values: 0.01= -5.57 0.05= -5.08 0.1= -4.82
##
## Potential break point at position: 155
plot(za_org_usd)
# Map the reported break position (observation 155) back to a date.
time(actnetusd)[155]
## [1] 2013.917
En relación a la estacionariedad[^5] de las series, ambas no cumplen con dicha condición ya que presentan tendencia creciente y, por ende, no tienen media constante en el tiempo. Para confirmar esto se realiza la prueba de hipótesis de Dickey-Fuller aumentada, donde la hipótesis nula es que la serie tiene raíz unitaria (proceso no estacionario); en ambos casos no se rechaza la hipótesis nula (serie en colones: estadístico -3.0082767 y valor-p 0.1515055; serie en dólares: estadístico -2.7303393 y valor-p 0.2684702). Además, se puede observar que la Función de Autocorrelación Simple Muestral (ACF) decae lentamente a 0 (figuras @ref(fig:acfpacfseriescrc) y @ref(fig:acfpacfseriesusd)), lo que sugiere que, para hacer estacionarias las series, éstas se podrían transformar a logaritmo y diferenciar.
# ACF/PACF (up to MaxLag2) of the CRC series in levels; the slow ACF
# decay is consistent with the non-stationarity discussed in the text.
autocorrecrc<-acf2(actnetcrc,max.lag = MaxLag2)
Función de autocorrelación y autocorrelación parcial estimadas de la serie de cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones, febrero 2001 a diciembre-2020
#actnetcrc%>% ggtsdisplay(main="Colones")
# Fix: this plot is captioned as the USD series but was computed from
# the CRC series (copy-paste error) — it must use actnetusd.
autocorreusd<-acf2(actnetusd,max.lag = MaxLag2)
Función de autocorrelación y autocorrelación parcial estimadas de la serie de cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en dolares, febrero 2001 a diciembre-2020
# actnetusd%>% ggtsdisplay(main="Dolares")
# Teraesvirta neural-network linearity test (H0: linearity in mean).
# Fix: the series were swapped — crclineal was computed from the USD
# series and usdlineal from the CRC series; the reported statistics in
# the text should be re-checked after this correction.
crclineal<-fNonlinear::tnnTest(actnetcrc, lag = 1, title = NULL, description = NULL)
usdlineal<-fNonlinear::tnnTest(actnetusd, lag = 1, title = NULL, description = NULL)
# Kennan tests for nonlineary
#
# La hipótesis nula de que la serie de tiempo sigue algún proceso de AR.
# Keenan one-degree-of-freedom test on the log CRC series:
# H0 = the series follows a linear AR process; a small p-value points
# to nonlinearity in mean.
Keenan.test(log(actnetcrc))
## $test.stat
## [1] 6.554213
##
## $p.value
## [1] 0.01107209
##
## $order
## [1] 1
# Same test with the working AR order fixed at 1.
Keenan.test(log(actnetcrc), order=1)
## $test.stat
## [1] 6.554213
##
## $p.value
## [1] 0.01107209
##
## $order
## [1] 1
# Same test with the working AR order fixed at 2.
Keenan.test(log(actnetcrc), order=2)
## $test.stat
## [1] 7.604942
##
## $p.value
## [1] 0.006268688
##
## $order
## [1] 2
# Same test with the working AR order fixed at 3.
Keenan.test(log(actnetcrc), order=3)
## $test.stat
## [1] 12.3357
##
## $p.value
## [1] 0.0005315724
##
## $order
## [1] 3
# Keenan test on the log USD series (default order selection).
Keenan.test(log(actnetusd))
## $test.stat
## [1] 6.742342
##
## $p.value
## [1] 0.009991382
##
## $order
## [1] 1
# USD series with the working AR order fixed at 1.
Keenan.test(log(actnetusd), order=1)
## $test.stat
## [1] 6.742342
##
## $p.value
## [1] 0.009991382
##
## $order
## [1] 1
# USD series with the working AR order fixed at 2.
Keenan.test(log(actnetusd), order=2)
## $test.stat
## [1] 6.162359
##
## $p.value
## [1] 0.01373462
##
## $order
## [1] 2
# USD series with the working AR order fixed at 3.
Keenan.test(log(actnetusd), order=3)
## $test.stat
## [1] 7.098561
##
## $p.value
## [1] 0.008242026
##
## $order
## [1] 3
Lo que respecta a la linealidad de las series, se observa que las mismas cumplen con la linealidad en la media, lo que es confirmado con la prueba de hipótesis de Teraesvirta, de la cual se concluye que no hay suficiente evidencia estadística para rechazar la hipótesis nula de que la serie cronológica es lineal en la media, tanto para colones como para dólares (Colones: estadístico 0.4947052, valor-p 0.7808653; Dólares: estadístico 1.4958362, valor-p 0.473351).
En la figura @ref(fig:variabilidadseries) se observa para el caso de colones una variabilidad estable a lo largo del periodo de análisis, por otro lado, los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en dolares se observa una variabilidad importante antes del año 2005, sin embargo, posterior a ese año tiende a estabilizarse.
# "Variability" series: ratio of the log level to the log of the
# lagged level.
# NOTE(review): log(x)/log(lag(x)) is a ratio of logs, not a growth
# rate; if log-return variability was intended, diff(log(x)) would be
# the usual transform — confirm with the author.
variabilidad_crc <- log(actnetcrc)/log(stats::lag(actnetcrc,1))
variabilidad_usd <- log(actnetusd)/log(stats::lag(actnetusd,1))
# Plot both ratios on a common y-range for comparability.
pvariabilidad_crc<-autoplot(variabilidad_crc)+ theme_bw()+ ggtitle('Colones')+
scale_y_continuous(limits = c(0.75,1.1))
pvariabilidad_usd <- autoplot(variabilidad_usd)+ theme_bw()+ ggtitle('Dolares')+
scale_y_continuous(limits = c(0.75,1.1))
grid.arrange(pvariabilidad_crc,pvariabilidad_usd,nrow=1,ncol=2)
Evolución de la variabilidad de la serie cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en dolares, febrero 2001 a diciembre-2020
A partir del análisis exploratorio realizado de las series y considerando sus caracteristicas se procedió a estimar 5 modelos de pronóstico lineales por cada serie:
Modelo de Suavizamiento Exponencial Holt-Winter Aditivo
Modelo de Suavizamiento Exponencial Holt-Winter Multiplicativo
3 modelos univariantes autorregresivos integrados de media móvil (ARIMA)
## Time windows: train on Jan-2011..Dec-2020, test on Jan-2021 onward.
inicio_train<- c(2011,1)
fin_train<- c(2020,12)
inicio_test <- c(2021,1)
sactnetcrc<- window(actnetcrc,start=inicio_train)
sactnetcrc_train<- window(actnetcrc,start=inicio_train, end=fin_train)
sactnetcrc_test<- window(actnetcrc,start=inicio_test)
sactnetusd<- window(actnetusd,start=inicio_train)
sactnetusd_train<- window(actnetusd,start=inicio_train, end=fin_train)
sactnetusd_test<- window(actnetusd,start=inicio_test)
# Forecast horizon = length of the test window.
h.param <- length(sactnetcrc_test)
# Holt-Winters models (multiplicative and additive seasonality) fitted
# on the CRC training window.
ht2_multi <- hw(sactnetcrc_train,seasonal="multiplicative",h = h.param)
ht2_add <- hw(sactnetcrc_train,seasonal="additive",h = h.param)
# summary(ht2_multi)
# summary(ht2_add)
# Accuracy table for both HW models on train and test sets; MSE is
# derived from RMSE since forecast::accuracy() does not report it.
Metricas_HW<-data.frame(
Modelo=c(rep("Holt Winter Multiplicativo",2),rep("Holt Winter Aditivo",2)),
Dataset= rep(c("Entrenamiento","Prueba"),2),
rbind(h2multi=forecast::accuracy(ht2_multi,sactnetcrc_test),h2aditive=forecast::accuracy(ht2_add,sactnetcrc_test)))%>%
select(Modelo,Dataset,RMSE,MAE)%>%
mutate(MSE=(RMSE)^2)
##
## Call:
## seas(x = sactnetcrc_train, transform.function = "log", regression.aictest = NULL,
## outlier = NULL, regression.variables = "ao2020.Mar", arima.model = "(0 1 0)(1 0 1)")
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## AO2020.Mar -0.26274 0.04680 -5.614 0.000000019789845 ***
## AR-Seasonal-12 0.92799 0.03349 27.706 < 0.0000000000000002 ***
## MA-Seasonal-12 0.61263 0.08556 7.160 0.000000000000804 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## SEATS adj. ARIMA: (0 1 0)(1 0 1) Obs.: 120 Transform: log
## AICc: 2881, BIC: 2892 QS (no seasonality in final): 0
## Box-Ljung (no autocorr.): 26 Shapiro (normality): 0.9731 *
## Messages generated by X-13:
## Warnings:
## - At least one visually significant trading day peak has been
## found in one or more of the estimated spectra.
##
## Call:
## seas(x = sactnetcrc_train, transform.function = "none", regression.aictest = NULL,
## outlier = NULL, regression.variables = "ao2020.Mar", arima.model = "(0 1 0)(0 1 1)")
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## AO2020.Mar -201525.95298 29865.83599 -6.748 0.000000000015 ***
## MA-Seasonal-12 0.76655 0.08227 9.317 < 0.0000000000000002 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## SEATS adj. ARIMA: (0 1 0)(0 1 1) Obs.: 120 Transform: none
## AICc: 2599, BIC: 2607 QS (no seasonality in final): 0
## Box-Ljung (no autocorr.): 29.1 Shapiro (normality): 0.9737 *
## Messages generated by X-13:
## Warnings:
## - At least one visually significant trading day peak has been
## found in one or more of the estimated spectra.
##
## Call:
## seas(x = sactnetcrc_train, transform.function = "log", regression.aictest = NULL,
## outlier = NULL, regression.variables = c("ls2015.May", "ao2020.Mar"),
## arima.model = "(2 1 0)(1 0 0)")
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## LS2015.May 0.21723 0.05403 4.021 0.0000579552508 ***
## AO2020.Mar -0.24128 0.04815 -5.011 0.0000005425861 ***
## AR-Nonseasonal-01 -0.15083 0.08874 -1.700 0.08918 .
## AR-Nonseasonal-02 -0.26087 0.08985 -2.904 0.00369 **
## AR-Seasonal-12 0.55845 0.07922 7.050 0.0000000000018 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## SEATS adj. ARIMA: (2 1 0)(1 0 0) Obs.: 120 Transform: log
## AICc: 2875, BIC: 2891 QS (no seasonality in final): 0
## Box-Ljung (no autocorr.): 20.61 Shapiro (normality): 0.9831
#### Stationarity of the residuals
## Constant mean: ADF test on the residuals of the three SARIMA fits.
# NOTE(review): resseas1/2/3 are created in a chunk not visible here —
# presumably the residuals of modelseas1/2/3; confirm upstream.
adf_res_CRC_1<- adf.test(resseas1 , alternative='stationary')
## Warning in adf.test(resseas1, alternative = "stationary"): p-value smaller than
## printed p-value
adf_res_CRC_2<- adf.test(resseas2 , alternative='stationary')
## Warning in adf.test(resseas2, alternative = "stationary"): p-value smaller than
## printed p-value
adf_res_CRC_3<- adf.test(resseas3 , alternative='stationary')
## Warning in adf.test(resseas3, alternative = "stationary"): p-value smaller than
## printed p-value
# adf_res_CRC_1
# Only model 2's ADF result is printed in the report.
adf_res_CRC_2
##
## Augmented Dickey-Fuller Test
##
## data: resseas2
## Dickey-Fuller = -5.3808, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary
# adf_res_CRC_3
#### Autocorrelation of the residuals
#####################################
# Residual autocorrelation and graphical checks
## Ljung-Box test
# H0: residuals are independent
# H1: residuals are not independent
# lb_res_CRC_1 <- checkresiduals(modelseas1 , lag=MaxLag2)
lb_res_CRC_2 <- checkresiduals(modelseas2 , lag=MaxLag2)
# lb_res_CRC_3 <- checkresiduals(modelseas3 , lag=MaxLag2)
#### Constant variance of the residuals
## Engle's ARCH LM test for residual heteroscedasticity
# H0: residuals are homoscedastic (no ARCH effects)
# H1: residuals are not homoscedastic
# FinTS::ArchTest(resseas1,lag=12)
FinTS::ArchTest(resseas2,lag=12)
##
## ARCH LM-test; Null hypothesis: no ARCH effects
##
## data: resseas2
## Chi-squared = 12.335, df = 12, p-value = 0.4192
# FinTS::ArchTest(resseas3,lag=12)
# autoplot(resseas1^2 )+ theme_bw() ; acf2(resseas1^2, max.lag=MaxLag2)
# Squared residuals of model 2: level plot plus ACF/PACF as a visual
# check for remaining ARCH effects.
autoplot(resseas2^2 )+ theme_bw() ; acf2(resseas2^2, max.lag=MaxLag2)
## [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [,11] [,12] [,13]
## ACF 0.03 0.13 0.11 0.09 0.06 -0.03 0.22 -0.07 0.10 -0.03 0.00 0.02 0.12
## PACF 0.03 0.13 0.10 0.07 0.03 -0.07 0.21 -0.09 0.06 -0.05 -0.03 0.01 0.16
## [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25]
## ACF -0.01 -0.06 0.08 0.07 -0.01 0.01 0.04 -0.06 0.02 -0.11 0.15 -0.03
## PACF -0.09 -0.04 0.02 0.11 -0.03 0.01 -0.05 -0.04 0.02 -0.10 0.16 -0.03
## [,26] [,27] [,28] [,29] [,30] [,31] [,32] [,33] [,34] [,35] [,36] [,37]
## ACF 0.15 0.16 -0.03 0.14 -0.04 0.01 -0.08 0.12 0.01 -0.07 0.05 -0.03
## PACF 0.14 0.18 -0.04 0.03 -0.04 -0.13 -0.02 0.05 -0.01 -0.04 0.02 -0.02
## [,38] [,39] [,40] [,41] [,42] [,43] [,44] [,45] [,46] [,47] [,48] [,49]
## ACF 0.02 -0.06 0.11 -0.01 -0.02 -0.05 -0.08 -0.01 -0.09 -0.05 -0.06 -0.03
## PACF 0.05 -0.04 0.05 0.02 -0.05 -0.13 -0.06 -0.02 -0.10 0.03 0.02 0.04
## [,50] [,51] [,52] [,53] [,54] [,55] [,56] [,57] [,58] [,59] [,60] [,61]
## ACF 0.02 -0.01 0.01 0.11 0.14 0.02 0.06 0.02 -0.07 0.03 0.01 -0.05
## PACF 0.11 -0.01 0.03 0.04 0.12 0.00 0.05 -0.09 -0.14 0.07 -0.02 -0.03
# autoplot(resseas3^2 )+ theme_bw() ; acf2(resseas3^2, max.lag=MaxLag2)
#### Normality of the residuals
#####################################
# Residual normality tests
# H0: the sample comes from a normal distribution.
# H1: the sample does not come from a normal distribution.
## Jarque-Bera ("Holds" notes are the author's verdicts)
jb_res_CRC_1<-jarque.bera.test(resseas1) # Holds (author's note)
jb_res_CRC_2<-jarque.bera.test(resseas2) # Holds (author's note)
jb_res_CRC_3<-jarque.bera.test(resseas3) # Holds (author's note)
# jb_res_CRC_1
jb_res_CRC_2
##
## Jarque Bera Test
##
## data: resseas2
## X-squared = 4.5603, df = 2, p-value = 0.1023
# jb_res_CRC_3
## Shapiro-Wilk normality tests on the same residuals.
sw_res_CRC_1<-shapiro.test(resseas1) # Holds (author's note)
# NOTE(review): author marked this as passing, but the printed p-value
# below (0.01968) rejects normality at the 5% level — verify.
sw_res_CRC_2<-shapiro.test(resseas2) # Holds (author's note)
sw_res_CRC_3<-shapiro.test(resseas3) # Holds (author's note)
# sw_res_CRC_1
sw_res_CRC_2
##
## Shapiro-Wilk normality test
##
## data: resseas2
## W = 0.97374, p-value = 0.01968
# sw_res_CRC_3
# car::qqPlot(resseas1)
# car::qqPlot(resseas2)
# car::qqPlot(resseas3)
# X-13 forecasts over the test window (Jan-Jul 2021) from each SARIMA
# specification; series() adds the forecast spec to the seas model.
# NOTE(review): modelseas1/2/3 are fitted in a chunk not shown here.
pronostico_CRC_1 <-window(series(modelseas1,"forecast.forecasts"),start=inicio_test,end=c(2021,7))
## specs have been added to the model: forecast
pronostico_CRC_2 <-window(series(modelseas2,"forecast.forecasts"),start=inicio_test,end=c(2021,7))
## specs have been added to the model: forecast
pronostico_CRC_3 <-window(series(modelseas3,"forecast.forecasts"),start=inicio_test,end=c(2021,7))
## specs have been added to the model: forecast
# In-sample series from final(); used here as the training-set fit —
# confirm this is the intended comparison against the raw series.
pronostico_CRC_1_train <- final(modelseas1)
pronostico_CRC_2_train <- final(modelseas2)
pronostico_CRC_3_train <- final(modelseas3)
# Accuracy metrics (MAE, RSS, MSE, RMSE) on train and test windows;
# column 1 of the forecast object holds the point forecasts.
perfor_crc_train_mod_1 <- getPerformance(pronostico_CRC_1_train, sactnetcrc_train)
perfor_crc_train_mod_2 <- getPerformance(pronostico_CRC_2_train, sactnetcrc_train)
perfor_crc_train_mod_3 <- getPerformance(pronostico_CRC_3_train, sactnetcrc_train)
perfor_crc_test_mod_1 <- getPerformance(pronostico_CRC_1[,1],sactnetcrc_test)
perfor_crc_test_mod_2 <- getPerformance(pronostico_CRC_2[,1],sactnetcrc_test)
perfor_crc_test_mod_3 <- getPerformance(pronostico_CRC_3[,1],sactnetcrc_test)
# Assemble the metrics table for the three SARIMA models.
Metricas_Sarima_CRC <- data.frame(
Modelo = rep(
c(
"1. ARIMA (0 1 0)(1 0 1) Log",
"2. ARIMA (0 1 0)(0 1 1) Niveles",
"3. ARIMA (2 1 0)(1 0 0) Log"
),
2
),
Dataset = c(rep("Entrenamiento", 3), rep("Prueba", 3)),
rbind(
perfor_crc_train_mod_1,
perfor_crc_train_mod_2,
perfor_crc_train_mod_3,
perfor_crc_test_mod_1,
perfor_crc_test_mod_2,
perfor_crc_test_mod_3
)
)
# Combine Holt-Winters and SARIMA metrics into one table for the report.
Metricas_Mod_Lin<- bind_rows(Metricas_HW,Metricas_Sarima_CRC)
#Metricas_Mod_Lin<- Metricas_Sarima_CRC
knitr::kable(Metricas_Mod_Lin)
| Modelo | Dataset | RMSE | MAE | MSE | RSS | |
|---|---|---|---|---|---|---|
| Training.set | Holt Winter Multiplicativo | Entrenamiento | 46143.72 | 36530.25 | 2129242748 | NA |
| Test.set | Holt Winter Multiplicativo | Prueba | 234472.19 | 229925.73 | 54977208163 | NA |
| Training.set.1 | Holt Winter Aditivo | Entrenamiento | 43672.50 | 32822.04 | 1907286839 | NA |
| Test.set.1 | Holt Winter Aditivo | Prueba | 75931.71 | 65612.41 | 5765624923 | NA |
| …5 | 1. ARIMA (0 1 0)(1 0 1) Log | Entrenamiento | 29205.45 | 21349.58 | 852958462 | 102355015470 |
| …6 | 2. ARIMA (0 1 0)(0 1 1) Niveles | Entrenamiento | 29553.28 | 22021.57 | 873396193 | 104807543204 |
| …7 | 3. ARIMA (2 1 0)(1 0 0) Log | Entrenamiento | 37364.97 | 28987.47 | 1396141228 | 167536947352 |
| …8 | 1. ARIMA (0 1 0)(1 0 1) Log | Prueba | 98555.73 | 84387.87 | 9713231628 | 67992621397 |
| …9 | 2. ARIMA (0 1 0)(0 1 1) Niveles | Prueba | 72550.27 | 59411.16 | 5263541548 | 36844790837 |
| …10 | 3. ARIMA (2 1 0)(1 0 0) Log | Prueba | 139611.05 | 130079.25 | 19491245113 | 136438715794 |
# RMSE per model, faceted by dataset, for the CRC (colones) series.
# geom_col() maps the RMSE value directly to bar height; the original used
# geom_bar(weight = RMSE), which abuses the weighted-count statistic to draw
# the same bars (each Modelo/Dataset pair appears exactly once, so the
# weighted count equals the value).
ggplot(Metricas_Mod_Lin) +
  aes(x = Modelo, y = RMSE, fill = Dataset) +
  geom_col() +
  scale_fill_manual(values = c(Entrenamiento = "#E69999",
                               Prueba = "#5C7FA7")) +
  labs(x = "Método", y = "RMSE") +
  coord_flip() +
  theme_minimal() +
  theme(legend.position = "none") +
  facet_wrap(vars(Dataset), scales = "free", ncol = 1L)
Raíz del Error Cuadrático Medio de los Modelos Lineales según set de datos (entrenamiento y prueba) para la serie de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en Colones
### Holt-Winter Aditivo
# ht2_add_all <- hw(sactnetcrc,seasonal="additive",h = 5)
# autoplot(ht2_add_all)+
# theme_bw()
### ARIMA (0 1 0)(0 1 1) Niveles
# Refit the selected CRC specification — ARIMA (0 1 0)(0 1 1) in levels with
# an additive outlier for March 2020 — and overlay its Aug-Dec 2021 forecasts.
# NOTE(review): the "_all" suffix suggests the full series, but x is the
# training set here, while the USD analogue below fits the full series
# (sactnetusd) — confirm which was intended.
modelseas2_all<-seas(
x = sactnetcrc_train,
transform.function = "none",
regression.aictest = NULL,
outlier = NULL,
regression.variables = "ao2020.Mar",
arima.model = "(0 1 0)(0 1 1)"
)
autoplot(sactnetcrc)+
autolayer(window(series(modelseas2_all,"forecast.forecasts"),start=c(2021,8),end=c(2021,12)))+
theme_bw()
## specs have been added to the model: forecast
## For a multivariate time series, specify a seriesname for each time series. Defaulting to column names.
# Holt-Winters fits (multiplicative and additive) on the USD training series,
# forecasting h.param steps ahead.
ht2_multi_usd <- hw(sactnetusd_train,seasonal="multiplicative",h = h.param)
ht2_add_usd <- hw(sactnetusd_train,seasonal="additive",h = h.param)
summary(ht2_multi_usd)
summary(ht2_add_usd)
# Train/test accuracy for both HW variants, trimmed to RMSE/MAE and MSE
# (derived from RMSE) so it can be row-bound with the SARIMA metrics table.
Metricas_HW_usd<-data.frame(
Modelo=c(rep("Holt Winter Multiplicativo",2),rep("Holt Winter Aditivo",2)),
Dataset= rep(c("Entrenamiento","Prueba"),2),
rbind(h2multi=forecast::accuracy(ht2_multi_usd,sactnetusd_test),h2aditive=forecast::accuracy(ht2_add_usd,sactnetusd_test)))%>%
select(Modelo,Dataset,RMSE,MAE)%>%
mutate(MSE=(RMSE)^2)
##
## Call:
## seas(x = sactnetcrc_train, transform.function = "none", regression.aictest = NULL,
## outlier = NULL, regression.variables = "ao2020.Mar", arima.model = "(0 1 0)(0 1 1)")
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## AO2020.Mar -201525.95298 29865.83599 -6.748 0.000000000015 ***
## MA-Seasonal-12 0.76655 0.08227 9.317 < 0.0000000000000002 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## SEATS adj. ARIMA: (0 1 0)(0 1 1) Obs.: 120 Transform: none
## AICc: 2599, BIC: 2607 QS (no seasonality in final): 0
## Box-Ljung (no autocorr.): 29.1 Shapiro (normality): 0.9737 *
## Messages generated by X-13:
## Warnings:
## - At least one visually significant trading day peak has been
## found in one or more of the estimated spectra.
#### Stationarity of the residuals
## Constant mean
# Augmented Dickey-Fuller test (H0: unit root) on each USD model's residuals;
# the warnings below show all three p-values are below the printed 0.01.
adf_res_usd_1<- adf.test(resseas1_usd , alternative='stationary')
## Warning in adf.test(resseas1_usd, alternative = "stationary"): p-value smaller
## than printed p-value
adf_res_usd_2<- adf.test(resseas2_usd , alternative='stationary')
## Warning in adf.test(resseas2_usd, alternative = "stationary"): p-value smaller
## than printed p-value
adf_res_usd_3<- adf.test(resseas3_usd , alternative='stationary')
## Warning in adf.test(resseas3_usd, alternative = "stationary"): p-value smaller
## than printed p-value
# adf_res_usd_1
adf_res_usd_2
##
## Augmented Dickey-Fuller Test
##
## data: resseas2_usd
## Dickey-Fuller = -4.9025, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary
# adf_res_usd_3
#### Autocorrelation of the residuals
#####################################
# Residual autocorrelation, with graphical checks from checkresiduals().
## Ljung-Box test
# H0: residuals are independent
# H1: residuals are not independent
# lb_res_usd_1 <- checkresiduals(modelseas1_usd , lag=MaxLag2)
lb_res_usd_2 <- checkresiduals(modelseas2_usd , lag=MaxLag2)
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# lb_res_usd_3 <- checkresiduals(modelseas3_usd , lag=MaxLag2)
#####################################
# Normality of the residuals
# H0: the sample comes from a normal distribution.
# H1: the sample does not come from a normal distribution.
## Jarque-Bera
jb_res_usd_1<-jarque.bera.test(resseas1_usd) # passes
jb_res_usd_2<-jarque.bera.test(resseas2_usd) # passes
jb_res_usd_3<-jarque.bera.test(resseas3_usd) # passes
# jb_res_usd_1
jb_res_usd_2
##
## Jarque Bera Test
##
## data: resseas2_usd
## X-squared = 0.39006, df = 2, p-value = 0.8228
# jb_res_usd_3
# Shapiro-Wilk as a second normality check; consistent with Jarque-Bera here
# (model 2's printed p-value is 0.876, so normality is not rejected).
sw_res_usd_1<-shapiro.test(resseas1_usd) # passes
sw_res_usd_2<-shapiro.test(resseas2_usd) # passes
sw_res_usd_3<-shapiro.test(resseas3_usd) # passes
# sw_res_usd_1
sw_res_usd_2
##
## Shapiro-Wilk normality test
##
## data: resseas2_usd
## W = 0.9938, p-value = 0.876
# sw_res_usd_3
# car::qqPlot(resseas1_usd)
# QQ plot of model-2 residuals; the printed indices (29, 56) are the
# observations qqPlot identifies as most extreme (its return value).
car::qqPlot(resseas2_usd)
## [1] 29 56
# car::qqPlot(resseas3_usd)
#### Constant variance of the residuals
## Engle's ARCH LM test for residual heteroscedasticity
# H0: residuals are homoscedastic
# H1: residuals are not homoscedastic
# FinTS::ArchTest(resseas1_usd,lag=12)
# FinTS::ArchTest(resseas2_usd,lag=12)
FinTS::ArchTest(resseas3_usd,lag=12)
##
## ARCH LM-test; Null hypothesis: no ARCH effects
##
## data: resseas3_usd
## Chi-squared = 7.3805, df = 12, p-value = 0.8315
# autoplot(resseas1_usd^2 )+ theme_bw(); acf2(resseas1_usd^2, max.lag=MaxLag2)
# Squared residuals: level plot plus ACF/PACF to look for remaining ARCH
# structure (two statements on one line, separated by ';').
autoplot(resseas2_usd^2 )+ theme_bw(); acf2(resseas2_usd^2, max.lag=MaxLag2)
## [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [,11] [,12] [,13]
## ACF -0.15 0.10 0.03 -0.01 -0.02 0.12 -0.14 0.09 -0.09 0.00 0.09 -0.12 0.04
## PACF -0.15 0.08 0.05 -0.01 -0.03 0.12 -0.10 0.03 -0.06 -0.02 0.10 -0.11 0.02
## [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25]
## ACF -0.05 0.00 -0.11 -0.08 -0.08 -0.04 -0.06 0.05 -0.02 -0.01 -0.04 0.08
## PACF -0.06 0.02 -0.13 -0.14 -0.05 -0.08 -0.02 0.01 0.03 -0.01 -0.07 0.08
## [,26] [,27] [,28] [,29] [,30] [,31] [,32] [,33] [,34] [,35] [,36] [,37]
## ACF -0.05 0.15 -0.13 0.11 -0.05 0.03 -0.03 0.17 0.00 0.15 -0.04 0.02
## PACF -0.05 0.16 -0.12 0.07 -0.02 -0.02 -0.03 0.09 0.11 0.06 -0.02 -0.03
## [,38] [,39] [,40] [,41] [,42] [,43] [,44] [,45] [,46] [,47] [,48] [,49]
## ACF 0.02 -0.01 -0.06 -0.04 0.01 -0.10 0.05 -0.10 -0.01 -0.11 0.09 -0.13
## PACF 0.01 0.03 -0.13 -0.03 0.03 -0.04 0.01 -0.07 -0.01 -0.12 0.06 -0.07
## [,50] [,51] [,52] [,53] [,54] [,55] [,56] [,57] [,58] [,59] [,60] [,61]
## ACF 0.00 -0.07 0.05 -0.07 0.04 -0.05 0.11 0.02 -0.06 0.14 -0.06 -0.06
## PACF -0.02 0.04 0.03 0.02 -0.04 0.01 0.07 0.07 -0.10 0.10 -0.08 -0.09
# autoplot(resseas3_usd^2 )+ theme_bw(); acf2(resseas3_usd^2, max.lag=MaxLag2)
### Model performance
# Out-of-sample forecasts for the USD models (same procedure as the CRC
# section): window the X-13 forecasts to the test period.
pronostico_usd_1 <- window(series(modelseas1_usd,"forecast.forecasts"),start=inicio_test,end=c(2021,7))
## specs have been added to the model: forecast
pronostico_usd_2 <- window(series(modelseas2_usd,"forecast.forecasts"),start=inicio_test,end=c(2021,7))
## specs have been added to the model: forecast
pronostico_usd_3 <- window(series(modelseas3_usd,"forecast.forecasts"),start=inicio_test,end=c(2021,7))
## specs have been added to the model: forecast
# In-sample series from final() used as the training "fit".
pronostico_usd_1_train <- final(modelseas1_usd)
pronostico_usd_2_train <- final(modelseas2_usd)
pronostico_usd_3_train <- final(modelseas3_usd)
perfor_usd_train_mod_1 <- getPerformance(pronostico_usd_1_train, sactnetusd_train)
perfor_usd_train_mod_2 <- getPerformance(pronostico_usd_2_train, sactnetusd_train)
perfor_usd_train_mod_3 <- getPerformance(pronostico_usd_3_train, sactnetusd_train)
# Column 1 of the forecast object is the point forecast.
perfor_usd_test_mod_1 <- getPerformance(pronostico_usd_1[,1],sactnetusd_test)
perfor_usd_test_mod_2 <- getPerformance(pronostico_usd_2[,1],sactnetusd_test)
perfor_usd_test_mod_3 <- getPerformance(pronostico_usd_3[,1],sactnetusd_test)
# Metrics table for the USD SARIMA specifications (train + test rows),
# mirroring the CRC table above.  Row order of rbind() must match the labels.
Metricas_Sarima_usd <- data.frame(
Modelo = rep(
c(
"1.ARIMA (0 1 1)(1 1 0) Niveles",
"2.ARIMA (0 1 1)(0 1 1) Log",
"3.ARIMA (0 1 1)(0 1 1) Niveles"
),
2
),
Dataset = c(rep("Entrenamiento", 3), rep("Prueba", 3)),
rbind(
perfor_usd_train_mod_1,
perfor_usd_train_mod_2,
perfor_usd_train_mod_3,
perfor_usd_test_mod_1,
perfor_usd_test_mod_2,
perfor_usd_test_mod_3
)
)
# Note: this reuses (overwrites) Metricas_Mod_Lin from the CRC section.
Metricas_Mod_Lin<- bind_rows(Metricas_HW_usd,Metricas_Sarima_usd)
#Metricas_Mod_Lin<- Metricas_Sarima_usd
# RMSE per model, faceted by dataset, for the USD (dollars) series.
# Two fixes: (1) the original hard-coded title "Colones", but every object in
# this section is the USD series (Metricas_HW_usd / Metricas_Sarima_usd), so
# the label was wrong; (2) geom_col() maps RMSE directly to bar height
# instead of the geom_bar(weight = RMSE) weighted-count workaround.
ggplot(Metricas_Mod_Lin) +
  aes(x = Modelo, y = RMSE, fill = Dataset) +
  geom_col() +
  scale_fill_manual(values = c(Entrenamiento = "#E69999",
                               Prueba = "#5C7FA7")) +
  labs(x = "Método", y = "RMSE", title = "Dólares") +
  coord_flip() +
  theme_minimal() +
  theme(legend.position = "none") +
  facet_wrap(vars(Dataset), scales = "free", ncol = 1L)
### Holt-Winter Multiplicativo
# ht2_multi_all <- hw(sactnetusd,seasonal="multiplicative",h = 5)
# autoplot(ht2_multi_all)+
# theme_bw()
### ARIMA (0 1 1)(0 1 1) Log
# Refit the chosen USD specification — ARIMA (0 1 1)(0 1 1) with log
# transform — on the FULL series and overlay the Aug-Dec 2021 forecasts.
# Note: this overwrites modelseas2_all from the CRC section.
modelseas2_all<-seas(
x = sactnetusd,
transform.function = "log",
regression.aictest = NULL,
outlier = NULL,
arima.model = "(0 1 1)(0 1 1)"
)
autoplot(sactnetusd)+
autolayer(window(series(modelseas2_all,"forecast.forecasts"),start=c(2021,8),end=c(2021,12)))+
theme_bw()
## specs have been added to the model: forecast
## For a multivariate time series, specify a seriesname for each time series. Defaulting to column names.
# Grid search over TAR orders p1, p2 and threshold delay d (1..3 each) for
# the CRC training series, keeping the specification with the lowest AIC.
pm <- 1:3
mod.list.tar <- list()    # AIC of every fitted specification, keyed "p1-p2-d"
AIC.best.list <- list()   # AIC of each successive incumbent best model
AICM <- NULL
model.best <- list(d = 0, p1 = 0, p2 = 0)
# Start the incumbent at +Inf so the first fitted model always becomes the
# baseline.  The original seeded this with the magic constant 2888, which
# would silently leave model.best at its dummy (0,0,0) values if no candidate
# happened to beat that number.
AIC.best <- Inf
for (l in pm) {
  for (j in pm) {
    for (i in pm) {
      # Fix the RNG before each fit so results are reproducible
      # (presumably tar() has a stochastic component — hence the seed).
      set.seed(777)
      model.tar.s <- tar(sactnetcrc_train, p1 = j, p2 = i, d = l)
      mod.list.tar[[paste(j, i, l, sep = "-")]] <- model.tar.s$AIC
      if (model.tar.s$AIC < AIC.best) {
        AIC.best <- model.tar.s$AIC
        AIC.best.list[[paste(j, i, l, sep = "-")]] <- AIC.best
        model.best$d <- l
        model.best$p1 <- model.tar.s$p1
        model.best$p2 <- model.tar.s$p2
        print(paste(model.tar.s$p1, model.tar.s$p2, l, sep = "-"))
      }
    }
  }
}
# AICTar (commented out) would rank ALL fitted specifications; AICTarBest
# keeps only the successive incumbents recorded during the search above,
# sorted by their AIC (column `1`).
# AICTar<-bind_rows(mod.list.tar,.id = "Ordene-delay")%>%
# arrange(`1`)
#
# knitr::kable(head(AICTar,20))
AICTarBest<-bind_rows(AIC.best.list,.id = "Ordene-delay")%>%
arrange(`1`)
knitr::kable(head(AICTarBest,20))
# Fit the three best TAR specifications identified by the grid search.
mod.tar1<-TSA::tar(sactnetcrc_train,p1=2,p2=3,d=1)
mod.tar2<-TSA::tar(sactnetcrc_train,p1=3,p2=1,d=1)
mod.tar3<-TSA::tar(sactnetcrc_train,p1=3,p2=2,d=1)
# Estimated thresholds ($thd); models 1 and 3 land on the same threshold.
mod.tar1$thd
##
## 778351.5
mod.tar2$thd
##
## 634783.1
mod.tar3$thd
##
## 778351.5
# Lower-regime ($qr1) coefficient estimates for each model.
mod.tar1$qr1$coefficients
## intercept-sactnetcrc_train lag1-sactnetcrc_train
## 11332.3945310 0.8135618
## lag2-sactnetcrc_train
## 0.1841392
mod.tar2$qr1$coefficients
## intercept-sactnetcrc_train lag1-sactnetcrc_train
## 21868.4352666 0.7499834
## lag2-sactnetcrc_train lag3-sactnetcrc_train
## -0.3430847 0.5609114
mod.tar3$qr1$coefficients
## intercept-sactnetcrc_train lag1-sactnetcrc_train
## 4895.78533430 0.78566926
## lag2-sactnetcrc_train lag3-sactnetcrc_train
## -0.07458825 0.29804232
# Upper-regime ($qr2) coefficient estimates (models 1 and 3 coincide, as
# expected from their shared threshold).
mod.tar1$qr2$coefficients
## intercept-sactnetcrc_train lag1-sactnetcrc_train
## -285683.849096 2.250800
## lag2-sactnetcrc_train
## -1.029562
mod.tar2$qr2$coefficients
## intercept-sactnetcrc_train lag1-sactnetcrc_train
## 91802.3504991 0.8826022
mod.tar3$qr2$coefficients
## intercept-sactnetcrc_train lag1-sactnetcrc_train
## -285683.849096 2.250800
## lag2-sactnetcrc_train
## -1.029562
# AIC comparison of the three TAR models.  data.frame() keeps AIC numeric so
# kable formats it properly; the original cbind() on mixed types coerced the
# whole table to a character matrix.
data.frame(
  Modelo = c("p1=2,p2=3,d=1",
             "p1=3,p2=1,d=1",
             "p1=3,p2=2,d=1"),
  AIC = c(mod.tar1$AIC,
          mod.tar2$AIC,
          mod.tar3$AIC)) %>%
  knitr::kable()
| Modelo | AIC | |
|---|---|---|
| 1 | p1=2,p2=3,d=1 | 2880 |
| 1 | p1=3,p2=1,d=1 | 2878 |
| 1 | p1=3,p2=2,d=1 | 2873 |
#tsdiag(mod.tar1)
# Standard residual diagnostics for the selected TAR model.
tsdiag(mod.tar2)
#tsdiag(mod.tar3)
# Re-wrap residuals as a monthly ts starting Jan 2011 so checkresiduals()
# can label the time axis.
# NOTE(review): tar() residuals are shorter than the training series (see the
# recycling warnings further down), so this start date is off by the lags
# consumed at the start — confirm against TSA::tar's residual definition.
checkresiduals(ts(mod.tar1$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
checkresiduals(ts(mod.tar2$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
checkresiduals(ts(mod.tar3$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# 7-step-ahead point forecasts (simulation based, 1000 paths), placed at
# Jan-Jul 2021 to line up with the test window.
prontar1<- ts(as.vector(predict(mod.tar1,n.ahead=7,n.sim=1000)$fit),start=c(2021,1),frequency = 12)
prontar2<- ts(as.vector(predict(mod.tar2,n.ahead=7,n.sim=1000)$fit),start=c(2021,1),frequency = 12)
prontar3<- ts(as.vector(predict(mod.tar3,n.ahead=7,n.sim=1000)$fit),start=c(2021,1),frequency = 12)
# In-sample fitted values = observed minus residuals.  tar() consumes the
# first few observations for its lags, so $residuals is SHORTER than the
# series; the original subtractions recycled the shorter vector (hence the
# "longer object length is not a multiple of shorter object length" warnings)
# and produced misaligned, partly meaningless fits.  Align explicitly: the
# residuals are assumed to correspond to the LAST length(residuals)
# observations (standard for AR-type fits — TODO confirm in TSA::tar docs).
tar_fitted <- function(mod, train) {
  res <- as.vector(mod$residuals)
  obs <- as.vector(train)
  offset <- length(obs) - length(res)  # observations lost to the lags
  ts(tail(obs, length(res)) - res,
     start = time(train)[offset + 1],
     frequency = frequency(train))
}
fit1 <- tar_fitted(mod.tar1, sactnetcrc_train)
fit2 <- tar_fitted(mod.tar2, sactnetcrc_train)
fit3 <- tar_fitted(mod.tar3, sactnetcrc_train)
# Overlay the three fitted series on the training data.
autoplot(sactnetcrc_train)+
autolayer(fit1)+
autolayer(fit2)+
autolayer(fit3)+
theme_bw()
# Test-set RMSE of each TAR forecast; model 2 is clearly best (142519 vs
# 288035 and 254417).
Metrics::rmse(sactnetcrc_test, prontar1)
## [1] 288035.2
Metrics::rmse(sactnetcrc_test, prontar2)
## [1] 142518.6
Metrics::rmse(sactnetcrc_test, prontar3)
## [1] 254416.6
# Forecasts vs. the observed test series.  Note scale_y_continuous(limits=)
# DROPS any points outside [5e5, 1.4e6] rather than merely zooming.
autoplot(sactnetcrc_test)+
autolayer(prontar1)+
autolayer(prontar2)+
autolayer(prontar3)+
theme_bw()+
scale_y_continuous(limits = c(500000,1400000))
Thus the threshold delay, the number of lags in each regime, and the threshold value are computed jointly by grid search.
# SETAR specification search on the CRC training series: embedding dimension
# m = 3, threshold delays 1-2, two thresholds, ranked by AIC.
# NOTE(review): selectSETAR's `include` is a single match.arg choice, not a
# vector — passing all four candidates just selects the first ("const"),
# i.e. the default; confirm the intent was not to search over them.
Setar1 <-
selectSETAR(
sactnetcrc_train,
include = c("const", "trend","none", "both"),
m = 3,
thDelay = seq(1, 2, by = 1),
nthresh = 2,
criterion = "AIC",
type = "level",
plot = T,
trace = T
)
## Using maximum autoregressive order for low regime: mL = 3
## Using maximum autoregressive order for high regime: mH = 3
## Using maximum autoregressive order for middle regime: mM = 3
## Searching on 81 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on 1458 combinations of thresholds ( 81 ), thDelay ( 2 ), mL ( 3 ) and MM ( 3 )
##
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5Result of the one threshold search:
## -Thresh: 730317.6 -Delay: 2 -mL: 3 -mH: 1 - AIC 2618.596
## Second best: 615605.6 (conditionnal on th= 730317.6 and Delay= 2 ) SSR/AIC: 2618.562
##
## Trim not respected: 0.4358974 0.4273504 0.1367521 from 615605.6 770561.5Second best: 730317.6 (conditionnal on th= 615605.6 and Delay= 2 ) SSR/AIC: 2618.562
## Second best: 615605.6 (conditionnal on th= 730317.6 and Delay= 2 ) SSR/AIC: 2620.497
##
## Trim not respected: 0.4358974 0.4273504 0.1367521 from 615605.6 770561.5Second best: 730317.6 (conditionnal on th= 615605.6 and Delay= 2 ) SSR/AIC: 2620.497
## Second best: 695528 (conditionnal on th= 730317.6 and Delay= 2 ) SSR/AIC: 2622.225
##
## Trim not respected: 0.5897436 0.2735043 0.1367521 from 695528 770561.5Second best: 730317.6 (conditionnal on th= 695528 and Delay= 2 ) SSR/AIC: 2622.225
# Same two-threshold search as Setar1 but with d = 2 (presumably the time
# delay of the embedding in tsDyn — TODO confirm against selectSETAR docs).
Setar2 <-
selectSETAR(
sactnetcrc_train,
m = 3,
d=2,
thDelay = seq(1, 2, by = 1),
nthresh = 2,
criterion = "AIC",
type = "level",
plot = T,
trace = T
)
## Using maximum autoregressive order for low regime: mL = 3
## Using maximum autoregressive order for high regime: mH = 3
## Using maximum autoregressive order for middle regime: mM = 3
## Searching on 78 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on 1404 combinations of thresholds ( 78 ), thDelay ( 2 ), mL ( 3 ) and MM ( 3 )
##
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6Result of the one threshold search:
## -Thresh: 743290.3 -Delay: 2 -mL: 2 -mH: 3 - AIC 2673.823
## Second best: 701399.4 (conditionnal on th= 743290.3 and Delay= 2 ) SSR/AIC: 2685.326
##
## Trim not respected: 0.6315789 0.2368421 0.1315789 from 701399.4 766323.9
## Trim not respected: 0.6315789 0.245614 0.122807 from 701399.4 768336.6Second best: 529852.4 (conditionnal on th= 701399.4 and Delay= 2 ) SSR/AIC: 2681.539
## Second best: 521049.7 (conditionnal on th= 743290.3 and Delay= 2 ) SSR/AIC: 2678.796
##
## Trim not respected: 0.2982456 0.5701754 0.1315789 from 521049.7 766323.9
## Trim not respected: 0.2982456 0.5789474 0.122807 from 521049.7 768336.6Second best: 748409.3 (conditionnal on th= 521049.7 and Delay= 2 ) SSR/AIC: 2678.771
## Second best: 695643.6 (conditionnal on th= 743290.3 and Delay= 2 ) SSR/AIC: 2678.939
##
## Trim not respected: 0.6140351 0.254386 0.1315789 from 695643.6 766323.9
## Trim not respected: 0.6140351 0.2631579 0.122807 from 695643.6 768336.6Second best: 743290.3 (conditionnal on th= 695643.6 and Delay= 2 ) SSR/AIC: 2678.939
# Single-threshold search (nthresh = 1), widening the delay grid to 0-2,
# with d = 1.
Setar3 <-
selectSETAR(
sactnetcrc_train,
m = 3,
thDelay = seq(0, 2, by = 1),
nthresh = 1,
d = 1,
criterion = "AIC",
type = "level",
plot = T,
trace = T
)
## Using maximum autoregressive order for low regime: mL = 3
## Using maximum autoregressive order for high regime: mH = 3
## Searching on 81 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on 2187 combinations of thresholds ( 81 ), thDelay ( 3 ), mL ( 3 ) and MM ( 3 )
##
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## 1 T: Trim not respected: 0.8547009 0.1452991 from th: 770561.5
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
# As Setar3 (one threshold, delays 0-2) but with d = 2.
Setar4 <-
selectSETAR(
sactnetcrc_train,
m = 3,
thDelay = seq(0, 2, by = 1),
nthresh = 1,
d = 2,
criterion = "AIC",
type = "level",
plot = T,
trace = T
)
## Using maximum autoregressive order for low regime: mL = 3
## Using maximum autoregressive order for high regime: mH = 3
## Searching on 78 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on 2106 combinations of thresholds ( 78 ), thDelay ( 3 ), mL ( 3 ) and MM ( 3 )
##
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 768336.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
# Top-5 threshold/order candidates explored by Setar1, ranked by AIC.
Setar1$allTh%>%
as.data.frame()%>%
arrange(AIC,thDelay,mL,mH)%>%
head(5)
## thDelay mL mH th AIC
## 1 2 3 1 730317.6 2618.596
## 2 2 3 1 748435.4 2618.740
## 3 2 3 1 733818.8 2618.772
## 4 2 3 1 748409.3 2618.774
## 5 2 3 1 737894.9 2618.840
# Top-5 candidates explored by Setar2, ranked by AIC.
Setar2$allTh%>%
as.data.frame()%>%
arrange(AIC,thDelay,mL,mH)%>%
head(5)
## thDelay mL mH th AIC
## 1 2 2 3 743290.3 2673.823
## 2 2 2 3 748409.3 2673.909
## 3 2 3 3 743290.3 2675.819
## 4 2 3 3 748409.3 2675.909
## 5 2 2 1 763482.7 2676.310
# Top-5 candidates explored by Setar3 — the overall best AIC (2607.5).
Setar3$allTh%>%
as.data.frame()%>%
arrange(AIC,thDelay,mL,mH)%>%
head(5)
## thDelay mL mH th AIC
## 1 0 3 2 766323.9 2607.529
## 2 0 3 3 766323.9 2607.907
## 3 0 3 2 763482.7 2608.894
## 4 0 3 2 761075.9 2608.935
## 5 0 3 3 763482.7 2609.152
# Top-5 candidates explored by Setar4, ranked by AIC.
Setar4$allTh%>%
as.data.frame()%>%
arrange(AIC,thDelay,mL,mH)%>%
head(5)
## thDelay mL mH th AIC
## 1 0 2 1 765999.6 2672.119
## 2 0 3 1 765999.6 2673.514
## 3 2 2 3 743290.3 2673.823
## 4 0 2 1 768336.6 2673.825
## 5 0 2 1 763482.7 2673.834
# SETAR fit using the best Setar1 configuration (thDelay=2, mL=3, mH=1).
modeloas1 <-
setar(
sactnetcrc_train,
m = 3,
mL = 3,
mH = 1,
d=1,
nthresh = 1,
thDelay = 2,
type = "level"
)
##
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 770561.5
## Warning: Possible unit root in the low regime. Roots are: 0.9958 1.4296 1.4296
## Raiz Unitaria
# NOTE(review): a stale trailing comment here claimed AIC = -632,
# MAPE = 0.4352% — values from some other (log-scale?) run; the summary
# printed below reports residual variance 2668998775, AIC 2619, MAPE 6.397%.
summary(modeloas1)
##
## Non linear autoregressive model
##
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
## const.L phiL.1 phiL.2 phiL.3
## 6788.2145658 0.8298233 -0.3141401 0.4914032
##
## High regime:
## const.H phiH.1
## 62580.5911109 0.9301118
##
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (0)X(t-1)+ (1)X(t-2)
## -Value: 730318
## Proportion of points in low regime: 74.36% High regime: 25.64%
##
## Residuals:
## Min 1Q Median 3Q Max
## -153164.6 -31930.4 -1978.2 28529.5 169203.7
##
## Fit:
## residuals variance = 2668998775, AIC = 2619, MAPE = 6.397%
##
## Coefficient(s):
##
## Estimate Std. Error t value Pr(>|t|)
## const.L 6788.214566 26284.175383 0.2583 0.79667
## phiL.1 0.829823 0.122599 6.7686 0.0000000005923 ***
## phiL.2 -0.314140 0.161357 -1.9469 0.05401 .
## phiL.3 0.491403 0.119418 4.1150 0.0000734657575 ***
## const.H 62580.591111 56979.088286 1.0983 0.27438
## phiH.1 0.930112 0.069313 13.4190 < 0.00000000000000022 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Threshold
## Variable: Z(t) = + (0) X(t) + (0) X(t-1)+ (1) X(t-2)
##
## Value: 730318
# plot(modeloas1)
# Ljung-Box and graphical residual checks, residuals re-wrapped as monthly ts.
checkresiduals(ts(modeloas1$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# SETAR fit using the best Setar2 configuration (d=2, thDelay=2, mL=2, mH=3).
modeloas2 <-
setar(
sactnetcrc_train,
m = 3,
mL = 2,
mH = 3,
d=2,
nthresh = 1,
thDelay = 2,
type = "level"
)
##
## 1 T: Trim not respected: 0.8508772 0.1491228 from th: 763482.7
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 765999.6
## 1 T: Trim not respected: 0.877193 0.122807 from th: 768336.6
## Warning: Possible unit root in the high regime. Roots are: 0.8544 1.0679 1.0679
## Warning: Possible unit root in the low regime. Roots are: 0.9985 1.9337
## Raiz Unitaria
# NOTE(review): stale trailing comment removed (claimed AIC = -635,
# MAPE = 0.4584%); the printed summary below reports AIC 2674, MAPE 8.046%.
summary(modeloas2)
##
## Non linear autoregressive model
##
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
## const.L phiL.1 phiL.2
## 15969.0293198 0.4843835 0.5179294
##
## High regime:
## const.H phiH.1 phiH.2 phiH.3
## -345928.6511360 0.5219453 -0.1177826 1.0262792
##
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (0)X(t-1)+ (1)X(t-2)
## -Value: 743290
## Proportion of points in low regime: 79.82% High regime: 20.18%
##
## Residuals:
## Min 1Q Median 3Q Max
## -173908 -41385 -2497 44119 191478
##
## Fit:
## residuals variance = 4158948497, AIC = 2674, MAPE = 8.046%
##
## Coefficient(s):
##
## Estimate Std. Error t value Pr(>|t|)
## const.L 15969.02932 29654.94950 0.5385 0.59129
## phiL.1 0.48438 0.10049 4.8202 0.000004503 ***
## phiL.2 0.51793 0.10345 5.0064 0.000002059 ***
## const.H -345928.65114 228833.80007 -1.5117 0.13340
## phiH.1 0.52195 0.20755 2.5147 0.01332 *
## phiH.2 -0.11778 0.20588 -0.5721 0.56840
## phiH.3 1.02628 0.39780 2.5799 0.01117 *
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Threshold
## Variable: Z(t) = + (0) X(t) + (0) X(t-1)+ (1) X(t-2)
##
## Value: 743290
# plot(modeloas2)
# Residual checks for modeloas2.
checkresiduals(ts(modeloas2$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# SETAR fit using the best Setar3 configuration (thDelay=0, mL=3, mH=2).
modeloas3 <-
setar(
sactnetcrc_train,
m = 3,
mL = 3,
mH = 2,
d=1,
nthresh = 1,
thDelay = 0,
type = "level"
)
## Warning: Possible unit root in the high regime. Roots are: 0.7777 1.5287
## Warning: Possible unit root in the low regime. Roots are: 0.9837 1.736 1.736
## Raiz Unitaria
# NOTE(review): stale trailing comment removed (claimed AIC = -621,
# MAPE = 0.4621%); the printed summary below reports AIC 2608, MAPE 6.077%.
summary(modeloas3)
##
## Non linear autoregressive model
##
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
## const.L phiL.1 phiL.2 phiL.3
## -3220.1735591 0.8068262 -0.1186698 0.3373163
##
## High regime:
## const.H phiH.1 phiH.2
## -153038.854747 1.939965 -0.841122
##
## Threshold:
## -Variable: Z(t) = + (1) X(t)+ (0)X(t-1)+ (0)X(t-2)
## -Value: 766324
## Proportion of points in low regime: 82.91% High regime: 17.09%
##
## Residuals:
## Min 1Q Median 3Q Max
## -195426.8 -26217.0 -6775.5 30310.5 159633.0
##
## Fit:
## residuals variance = 2393621738, AIC = 2608, MAPE = 6.077%
##
## Coefficient(s):
##
## Estimate Std. Error t value Pr(>|t|)
## const.L -3220.17356 23624.40976 -0.1363 0.8918211
## phiL.1 0.80683 0.10141 7.9561 0.000000000001500 ***
## phiL.2 -0.11867 0.13459 -0.8817 0.3798108
## phiL.3 0.33732 0.10782 3.1284 0.0022358 **
## const.H -153038.85475 82621.52690 -1.8523 0.0665944 .
## phiH.1 1.93996 0.25176 7.7058 0.000000000005452 ***
## phiH.2 -0.84112 0.22778 -3.6926 0.0003434 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Threshold
## Variable: Z(t) = + (1) X(t) + (0) X(t-1)+ (0) X(t-2)
##
## Value: 766324
# plot(modeloas3)
# Residual checks for modeloas3.
checkresiduals(ts(modeloas3$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# SETAR fit using the best Setar4 configuration (thDelay=0, mL=1, mH=2, d=2).
modeloas4 <-
setar(
sactnetcrc_train,
m = 3,
mL = 1,
mH = 2,
d=2,
nthresh = 1,
thDelay = 0,
type = "level"
)
# NOTE(review): the trailing comment here was copy-pasted from modeloas3
# (AIC = -621, MAPE = 0.4621%); the printed summary below reports AIC 2683,
# MAPE 7.938% — comment removed.
summary(modeloas4)
##
## Non linear autoregressive model
##
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
## const.L phiL.1
## 497047.101814 -0.245272
##
## High regime:
## const.H phiH.1 phiH.2
## 48728.0572066 0.6017097 0.3500140
##
## Threshold:
## -Variable: Z(t) = + (1) X(t)+ (0)X(t-1)+ (0)X(t-2)
## -Value: 438446
## Proportion of points in low regime: 17.54% High regime: 82.46%
##
## Residuals:
## Min 1Q Median 3Q Max
## -161045 -39211 -5795 36633 192910
##
## Fit:
## residuals variance = 4642154996, AIC = 2683, MAPE = 7.938%
##
## Coefficient(s):
##
## Estimate Std. Error t value Pr(>|t|)
## const.L 497047.101814 230952.873888 2.1522 0.0334753 *
## phiL.1 -0.245272 0.593230 -0.4135 0.6800454
## const.H 48728.057207 39998.284885 1.2183 0.2256210
## phiH.1 0.601710 0.092488 6.5058 0.000000002091 ***
## phiH.2 0.350014 0.098458 3.5550 0.0005498 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Threshold
## Variable: Z(t) = + (1) X(t) + (0) X(t-1)+ (0) X(t-2)
##
## Value: 438446
# plot(modeloas4)
# Residual diagnostics for modeloas4 (monthly ts starting Jan 2011).
checkresiduals(ts(modeloas4$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# In-sample comparison of the four SETAR candidates by AIC (lower is better);
# modeloas3 wins in-sample.
AIC(modeloas1)
## [1] 2618.596
AIC(modeloas2)
## [1] 2673.823
AIC(modeloas3)
## [1] 2607.529
AIC(modeloas4)
## [1] 2683.013
# 7-step-ahead forecasts from each candidate (the test horizon).
pronsetar1<- predict(modeloas1, n.ahead = 7)
pronsetar2<- predict(modeloas2, n.ahead = 7)
pronsetar3<- predict(modeloas3, n.ahead = 7)
pronsetar4<- predict(modeloas4, n.ahead = 7)
# In-sample fitted values, re-wrapped as monthly ts starting Jan 2011.
fit1<-ts(modeloas1$fitted.values,start =c(2011,1),frequency = 12)
fit2<-ts(modeloas2$fitted.values,start =c(2011,1),frequency = 12)
fit3<-ts(modeloas3$fitted.values,start =c(2011,1),frequency = 12)
fit4<-ts(modeloas4$fitted.values,start =c(2011,1),frequency = 12)
# Overlay each candidate's fitted values on the CRC training series.
autoplot(sactnetcrc_train)+
autolayer(fit1)+
autolayer(fit2)+
autolayer(fit3)+
autolayer(fit4)+
theme_bw()
# Out-of-sample comparison: RMSE of each SETAR forecast against the test set,
# sorted best-first. Model 4 wins out-of-sample even though model 3 had the
# best in-sample AIC.
data.frame(
Modelo= c(
"1) m = 3,mL = 3,mH = 1, d=1",
"2) m = 3,mL = 2,mH = 3, d=2",
"3) m = 3,mL = 3,mH = 2, d=1",
"4) m = 3,mL = 1,mH = 2, d=2"
),
RMSE=c(
Metrics::rmse(sactnetcrc_test, pronsetar1),
Metrics::rmse(sactnetcrc_test, pronsetar2),
Metrics::rmse(sactnetcrc_test, pronsetar3),
Metrics::rmse(sactnetcrc_test, pronsetar4)))%>%
arrange(RMSE)%>%
knitr::kable()
| Modelo | RMSE |
|---|---|
| 4) m = 3,mL = 1,mH = 2, d=2 | 80934.83 |
| 1) m = 3,mL = 3,mH = 1, d=1 | 97545.73 |
| 2) m = 3,mL = 2,mH = 3, d=2 | 245685.71 |
| 3) m = 3,mL = 3,mH = 2, d=1 | 313915.21 |
# All four SETAR forecasts against the test series; y-axis clipped to
# 500k-1.4M for readability.
autoplot(sactnetcrc_test)+
autolayer(pronsetar1)+
autolayer(pronsetar2)+
autolayer(pronsetar3)+
autolayer(pronsetar4)+
theme_bw()+
scale_y_continuous(limits = c(500000,1400000))
# Head-to-head on the CRC test set: best TAR forecast (prontar2, defined
# earlier in the document) vs best SETAR forecast (pronsetar4).
# SETAR model 4 has the lower RMSE.
Metrics::rmse(sactnetcrc_test,(prontar2))
## [1] 142518.6
Metrics::rmse(sactnetcrc_test, (pronsetar4))
## [1] 80934.83
autoplot(sactnetcrc_test)+
autolayer(prontar2)+
autolayer(pronsetar4)+
theme_bw()+
scale_y_continuous(limits = c(500000,1400000))
# Grid search over TAR specifications for the USD series: low-regime order p1,
# high-regime order p2, and threshold delay d, each in 1:4 (64 fits in total).
# Tracks every fit's AIC and the running best model.
pm <- 1:4
mod.list.tar <- list()   # AIC of every fitted model, keyed "p1-p2-d"
AIC.best.list <- list()  # each new best AIC as the search improves, keyed "p1-p2-d"
AICM <- NULL             # kept for compatibility with downstream code
model.best <- list(d = 0, p1 = 0, p2 = 0)
# Start from Inf (not an arbitrary cap like 10000) so the first fit always
# becomes the incumbent, regardless of the scale of the AICs.
AIC.best <- Inf
for (l in pm) {
  for (j in pm) {
    for (i in pm) {
      set.seed(777)  # fix the seed per fit so each model is reproducible
      model.tar.s <- tar(sactnetusd_train, p1 = j, p2 = i, d = l)
      key <- paste(j, i, l, sep = "-")
      mod.list.tar[[key]] <- model.tar.s$AIC
      # Progress trace; previously paste(..., sep = "-") fused the label into
      # "Modelo:-1-1-1".
      print(paste("Modelo:", key))
      if (model.tar.s$AIC < AIC.best) {
        AIC.best <- model.tar.s$AIC
        AIC.best.list[[key]] <- AIC.best
        model.best$d <- l
        model.best$p1 <- model.tar.s$p1
        model.best$p2 <- model.tar.s$p2
        print(paste(model.tar.s$p1, model.tar.s$p2, l, sep = "-"))
      }
    }
  }
}
# AICTar<-bind_rows(mod.list.tar,.id = "Ordene-delay")%>%
# arrange(`1`)
#
# knitr::kable(head(AICTar,20))
# Table of the successively improving AICs recorded during the grid search.
AICTarBest<-bind_rows(AIC.best.list,.id = "Ordene-delay")%>%
arrange(`1`)
knitr::kable(head(AICTarBest,20))
# Refit the three most promising TAR specifications for the USD series.
# NOTE(review): unlike the grid search, no set.seed() precedes these refits,
# so they may not reproduce the searched fits exactly — confirm.
mod.tar1.usd<-TSA::tar(sactnetusd_train,p1=3,p2=4,d=1)
mod.tar2.usd<-TSA::tar(sactnetusd_train,p1=1,p2=2,d=1)
mod.tar3.usd<-TSA::tar(sactnetusd_train,p1=1,p2=3,d=1)
# Estimated threshold value of each TAR model.
mod.tar1.usd$thd
##
## 622.0209
mod.tar2.usd$thd
##
## 670.8907
mod.tar3.usd$thd
##
## 691.3097
# Lower-regime regression coefficients (qr1) of each model.
mod.tar1.usd$qr1$coefficients
## intercept-sactnetusd_train lag1-sactnetusd_train
## 181.4332347 1.6151098
## lag2-sactnetusd_train lag3-sactnetusd_train
## -1.3676001 0.4599062
mod.tar2.usd$qr1$coefficients
## intercept-sactnetusd_train lag1-sactnetusd_train
## 166.0529509 0.7379237
mod.tar3.usd$qr1$coefficients
## intercept-sactnetusd_train lag1-sactnetusd_train
## 132.9783554 0.7962717
# Upper-regime regression coefficients (qr2) of each model.
mod.tar1.usd$qr2$coefficients
## intercept-sactnetusd_train lag1-sactnetusd_train
## 65.0189187 0.9439411
mod.tar2.usd$qr2$coefficients
## intercept-sactnetusd_train lag1-sactnetusd_train
## 83.4616216 0.9278156
mod.tar3.usd$qr2$coefficients
## intercept-sactnetusd_train lag1-sactnetusd_train
## 108.7318363 0.9060299
# In-sample AIC comparison of the three TAR refits, sorted best-first.
data.frame(
Modelo=c("p1=3,p2=4,d=1",
"p1=1,p2=2,d=1",
"p1=1,p2=3,d=1"),
AIC=c(mod.tar1.usd$AIC,
mod.tar2.usd$AIC,
mod.tar3.usd$AIC))%>%
arrange(AIC)%>%
knitr::kable()
| Modelo | AIC |
|---|---|
| p1=3,p2=4,d=1 | 1323 |
| p1=1,p2=3,d=1 | 1346 |
| p1=1,p2=2,d=1 | 1357 |
# Built-in TSA residual diagnostics for each TAR fit.
tsdiag(mod.tar1.usd)
tsdiag(mod.tar2.usd)
tsdiag(mod.tar3.usd)
# forecast::checkresiduals on the same residuals, re-wrapped as monthly ts.
checkresiduals(ts(mod.tar1.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
checkresiduals(ts(mod.tar2.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
checkresiduals(ts(mod.tar3.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# 7-step-ahead TAR forecasts, simulation-based (n.sim = 1000 paths),
# re-wrapped as monthly ts starting Jan 2021.
# NOTE(review): no set.seed() before predict(), so these simulated forecasts
# are not reproducible run-to-run — consider fixing a seed.
prontar1.usd<- ts(as.vector(predict(mod.tar1.usd,n.ahead=7,n.sim=1000)$fit),start=c(2021,1),frequency = 12)
prontar2.usd<- ts(as.vector(predict(mod.tar2.usd,n.ahead=7,n.sim=1000)$fit),start=c(2021,1),frequency = 12)
prontar3.usd<- ts(as.vector(predict(mod.tar3.usd,n.ahead=7,n.sim=1000)$fit),start=c(2021,1),frequency = 12)
# In-sample fitted values (y - residuals) for each TAR model.
# Two bugs fixed here:
#   1. fit2.usd and fit3.usd subtracted model-2/model-3 residuals from
#      model 1's observed series (mod.tar1.usd$y) — wrong model's y.
#   2. tar() residuals are shorter than y (the startup observations have no
#      residual), so the subtraction silently recycled — exactly what the
#      recorded "longer object length" warnings were reporting. We pad the
#      residual vector with leading NAs; this assumes residuals align with the
#      LAST observations of y (the usual convention) — TODO confirm for TSA::tar.
fitted_tar <- function(mod, start = c(2011, 1), frequency = 12) {
  y <- as.vector(mod$y)
  r <- as.vector(mod$residuals)
  pad <- length(y) - length(r)
  ts(y - c(rep(NA_real_, pad), r), start = start, frequency = frequency)
}
fit1.usd <- fitted_tar(mod.tar1.usd)
fit2.usd <- fitted_tar(mod.tar2.usd)
fit3.usd <- fitted_tar(mod.tar3.usd)
# Overlay each TAR model's fitted values on the USD training series.
autoplot(sactnetusd_train)+
autolayer(fit1.usd)+
autolayer(fit2.usd)+
autolayer(fit3.usd)+
theme_bw()
# Out-of-sample RMSE of each TAR forecast against the USD test set,
# sorted best-first.
data.frame(
Modelo=c("p1=3,p2=4,d=1",
"p1=1,p2=2,d=1",
"p1=1,p2=3,d=1"),
RMSE=c(
Metrics::rmse(sactnetusd_test, prontar1.usd),
Metrics::rmse(sactnetusd_test, prontar2.usd),
Metrics::rmse(sactnetusd_test, prontar3.usd)))%>%
arrange(RMSE)%>%
knitr::kable()
| Modelo | RMSE |
|---|---|
| p1=3,p2=4,d=1 | 334.8955 |
| p1=1,p2=2,d=1 | 360.1827 |
| p1=1,p2=3,d=1 | 368.9766 |
# The three TAR forecasts against the USD test series.
autoplot(sactnetusd_test)+
autolayer(prontar1.usd)+
autolayer(prontar2.usd)+
autolayer(prontar3.usd)+
theme_bw()
Next, the threshold delay, the number of lags in each regime, and the threshold value are selected jointly with `selectSETAR()`.
# Automatic SETAR specification search for the USD series: up to AR order 4,
# threshold delays 0-3, up to 3 thresholds, model chosen by AIC.
# NOTE(review): `include` is passed all four options at once; selectSETAR()
# match.arg()-style handling would effectively use the first ("const") —
# confirm this matches the intent. Also prefer TRUE over T for plot/trace.
Setar1.usd <-
selectSETAR(
sactnetusd_train,
include = c("const", "trend","none", "both"),
m = 4,
thDelay = seq(0, 3, by = 1),
nthresh = 3,
criterion = "AIC",
type = "level",
plot = T,
trace = T
)
## Using maximum autoregressive order for low regime: mL = 4
## Using maximum autoregressive order for high regime: mH = 4
## Searching on 80 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on 5120 combinations of thresholds ( 80 ), thDelay ( 4 ), mL ( 4 ) and MM ( 4 )
##
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
# Same SETAR search with threshold-variable delay d = 2.
Setar2.usd <-
selectSETAR(
sactnetusd_train,
m = 4,
d=2,
thDelay = seq(0, 3, by = 1),
nthresh = 3,
criterion = "AIC",
type = "level",
plot = T,
trace = T
)
## Using maximum autoregressive order for low regime: mL = 4
## Using maximum autoregressive order for high regime: mH = 4
## Searching on 78 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on 4992 combinations of thresholds ( 78 ), thDelay ( 4 ), mL ( 4 ) and MM ( 4 )
##
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
# SETAR search with d = 1 made explicit.
# NOTE(review): d = 1 is the default, so this appears to duplicate the
# Setar1.usd search (the trace output below is identical) — consider
# dropping one of the two.
Setar3.usd <-
selectSETAR(
sactnetusd_train,
m = 4,
thDelay = seq(0, 3, by = 1),
nthresh = 3,
d = 1,
criterion = "AIC",
type = "level",
plot = T,
trace = T
)
## Using maximum autoregressive order for low regime: mL = 4
## Using maximum autoregressive order for high regime: mH = 4
## Searching on 80 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on 5120 combinations of thresholds ( 80 ), thDelay ( 4 ), mL ( 4 ) and MM ( 4 )
##
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.131
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
## 1 T: Trim not respected: 0.8534483 0.1465517 from th: 1212.776
## 1 T: Trim not respected: 0.862069 0.137931 from th: 1212.776
## 1 T: Trim not respected: 0.8706897 0.1293103 from th: 1212.776
# SETAR search with d = 2.
# NOTE(review): this is the same specification as Setar2.usd (identical trace
# output) — the search is duplicated; consider dropping one of the two.
Setar4.usd <-
selectSETAR(
sactnetusd_train,
m = 4,
thDelay = seq(0, 3, by = 1),
nthresh = 3,
d = 2,
criterion = "AIC",
type = "level",
plot = T,
trace = T
)
## Using maximum autoregressive order for low regime: mL = 4
## Using maximum autoregressive order for high regime: mH = 4
## Searching on 78 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on 4992 combinations of thresholds ( 78 ), thDelay ( 4 ), mL ( 4 ) and MM ( 4 )
##
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1202.79
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1207.644
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8571429 0.1428571 from th: 1208.27
## 1 T: Trim not respected: 0.875 0.125 from th: 1208.27
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.131
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.131
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
## 1 T: Trim not respected: 0.8660714 0.1339286 from th: 1212.776
## 1 T: Trim not respected: 0.8839286 0.1160714 from th: 1212.776
## 1 T: Trim not respected: 0.9017857 0.09821429 from th: 1212.776
# Five best threshold candidates from search 1, lowest AIC first.
Setar1.usd$allTh %>%
  as.data.frame() %>%
  arrange(AIC, thDelay, mL, mH) %>%
  head(n = 5)
## thDelay mL mH th AIC
## 1 2 1 3 1208.270 1035.463
## 2 2 1 3 1207.644 1035.873
## 3 2 1 3 1202.790 1035.887
## 4 1 1 1 1202.790 1036.336
## 5 1 2 1 1182.401 1036.624
# Five best threshold candidates from search 2, lowest AIC first.
Setar2.usd$allTh %>%
  as.data.frame() %>%
  arrange(AIC, thDelay, mL, mH) %>%
  head(n = 5)
## thDelay mL mH th AIC
## 1 0 1 4 1182.4010 1100.897
## 2 2 3 1 785.7637 1101.222
## 3 1 3 1 691.3097 1101.489
## 4 1 3 3 691.3097 1101.540
## 5 0 1 4 1193.4284 1101.591
# Five best threshold candidates from search 3, lowest AIC first.
Setar3.usd$allTh %>%
  as.data.frame() %>%
  arrange(AIC, thDelay, mL, mH) %>%
  head(n = 5)
## thDelay mL mH th AIC
## 1 2 1 3 1208.270 1035.463
## 2 2 1 3 1207.644 1035.873
## 3 2 1 3 1202.790 1035.887
## 4 1 1 1 1202.790 1036.336
## 5 1 2 1 1182.401 1036.624
# Five best threshold candidates from search 4, lowest AIC first.
Setar4.usd$allTh %>%
  as.data.frame() %>%
  arrange(AIC, thDelay, mL, mH) %>%
  head(n = 5)
## thDelay mL mH th AIC
## 1 0 1 4 1182.4010 1100.897
## 2 2 3 1 785.7637 1101.222
## 3 1 3 1 691.3097 1101.489
## 4 1 3 3 691.3097 1101.540
## 5 0 1 4 1193.4284 1101.591
# SETAR candidate 1: single threshold on X(t-2) (thDelay = 2),
# AR(1) low regime, AR(3) high regime, level series.
modeloas1.usd <-
  setar(
    sactnetusd_train,
    nthresh = 1,
    thDelay = 2,
    mL = 1,
    mH = 3,
    d = 1,
    type = "level"
  )
##
## 1 T: Trim not respected: 0.8632479 0.1367521 from th: 1212.776
## Warning: Possible unit root in the high regime. Roots are: 1.0041 1.0041 0.9829
## Raiz Unitaria
summary(modeloas1.usd) # residuals variance = 5036, AIC = 1037, MAPE = 5.9% (earlier comment cited values from a different run)
##
## Non linear autoregressive model
##
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
## const.L phiL.1
## 37.5800433 0.9690969
##
## High regime:
## const.H phiH.1 phiH.2 phiH.3
## -67.137862 1.105962 -1.081897 1.009118
##
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (0)X(t-1)+ (1)X(t-2)
## -Value: 1208
## Proportion of points in low regime: 83.76% High regime: 16.24%
##
## Residuals:
## Min 1Q Median 3Q Max
## -140.7273 -48.8234 -9.7786 36.9904 227.4330
##
## Fit:
## residuals variance = 5036, AIC = 1037, MAPE = 5.9%
##
## Coefficient(s):
##
## Estimate Std. Error t value Pr(>|t|)
## const.L 37.580043 31.407394 1.1965 0.233970
## phiL.1 0.969097 0.031934 30.3472 < 0.00000000000000022 ***
## const.H -67.137862 207.413771 -0.3237 0.746765
## phiH.1 1.105962 0.215038 5.1431 0.000001134 ***
## phiH.2 -1.081897 0.345016 -3.1358 0.002181 **
## phiH.3 1.009118 0.316420 3.1892 0.001843 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Threshold
## Variable: Z(t) = + (0) X(t) + (0) X(t-1)+ (1) X(t-2)
##
## Value: 1208
# plot(modeloas1)
# Ljung-Box + ACF residual diagnostics for SETAR candidate 1,
# re-indexed as a monthly series starting Jan 2011.
checkresiduals(ts(modeloas1.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# SETAR candidate 2: single threshold on X(t-1) (thDelay = 1),
# AR(3) low regime, AR(1) high regime, level series.
modeloas2.usd <-
  setar(
    sactnetusd_train,
    nthresh = 1,
    thDelay = 1,
    mL = 3,
    mH = 1,
    d = 2,
    type = "level"
  )
##
## 1 T: Trim not respected: 0.8596491 0.1403509 from th: 1212.381
## Warning: Possible unit root in the low regime. Roots are: 0.9303 1.143 0.9303
## Raiz Unitaria
summary(modeloas2.usd) # residuals variance = 8762, AIC = 1103, MAPE = 7.549% (earlier comment cited values from a different run)
##
## Non linear autoregressive model
##
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
## const.L phiL.1 phiL.2 phiL.3
## 858.2495437 0.5731090 0.1113798 -1.0108432
##
## High regime:
## const.H phiH.1
## 258.7562047 0.7787606
##
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (1)X(t-1)+ (0)X(t-2)
## -Value: 785.8
## Proportion of points in low regime: 22.81% High regime: 77.19%
##
## Residuals:
## Min 1Q Median 3Q Max
## -267.8547 -64.4825 -8.4419 55.1728 308.2301
##
## Fit:
## residuals variance = 8762, AIC = 1103, MAPE = 7.549%
##
## Coefficient(s):
##
## Estimate Std. Error t value Pr(>|t|)
## const.L 858.249544 248.319565 3.4562 0.0007705 ***
## phiL.1 0.573109 0.195945 2.9249 0.0041589 **
## phiL.2 0.111380 0.308654 0.3609 0.7188742
## phiL.3 -1.010843 0.297348 -3.3995 0.0009310 ***
## const.H 258.756205 76.771445 3.3705 0.0010249 **
## phiH.1 0.778761 0.068832 11.3140 < 0.00000000000000022 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Threshold
## Variable: Z(t) = + (0) X(t) + (1) X(t-1)+ (0) X(t-2)
##
## Value: 785.8
# plot(modeloas2)
# Residual diagnostics for SETAR candidate 2 (monthly index from Jan 2011).
checkresiduals(ts(modeloas2.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# SETAR candidate 3: single threshold on X(t-1), AR(1) in both regimes.
modeloas3.usd <-
  setar(
    sactnetusd_train,
    nthresh = 1,
    thDelay = 1,
    mL = 1,
    mH = 1,
    d = 1,
    type = "level"
  )
##
## 1 T: Trim not respected: 0.8559322 0.1440678 from th: 1212.776
## Warning: Possible unit root in the high regime. Roots are: 0.926
## Warning: Possible unit root in the low regime. Roots are: 0.9973
## Raiz Unitaria
summary(modeloas3.usd) # residuals variance = 5265, AIC = 1038, MAPE = 6.015% (earlier comment cited values from a different run)
##
## Non linear autoregressive model
##
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
## const.L phiL.1
## 14.375566 1.002708
##
## High regime:
## const.H phiH.1
## -139.242572 1.079965
##
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (1)X(t-1)
## -Value: 1203
## Proportion of points in low regime: 81.36% High regime: 18.64%
##
## Residuals:
## Min 1Q Median 3Q Max
## -143.9335 -53.2829 -5.4996 50.1964 240.7517
##
## Fit:
## residuals variance = 5265, AIC = 1038, MAPE = 6.015%
##
## Coefficient(s):
##
## Estimate Std. Error t value Pr(>|t|)
## const.L 14.375566 33.188627 0.4331 0.6657
## phiL.1 1.002708 0.034585 28.9927 < 0.00000000000000022 ***
## const.H -139.242572 155.823087 -0.8936 0.3734
## phiH.1 1.079965 0.121916 8.8583 0.00000000000001111 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Threshold
## Variable: Z(t) = + (0) X(t) + (1) X(t-1)
##
## Value: 1203
# plot(modeloas3)
# Residual diagnostics for SETAR candidate 3 (monthly index from Jan 2011).
checkresiduals(ts(modeloas3.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# SETAR candidate 4: single threshold on X(t) (thDelay = 0), embedding
# dimension m = 3, AR(1) low regime, AR(2) high regime.
modeloas4.usd <-
  setar(
    sactnetusd_train,
    m = 3,
    nthresh = 1,
    thDelay = 0,
    mL = 1,
    mH = 2,
    d = 2,
    type = "level"
  )
## Warning: Possible unit root in the high regime. Roots are: 0.8301 6.3855
## Warning: Possible unit root in the low regime. Roots are: 0.9875
summary(modeloas4.usd) # residuals variance = 9172, AIC = 1107, MAPE = 8.311% (earlier comment cited values from a different run)
##
## Non linear autoregressive model
##
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
## const.L phiL.1
## 22.905069 1.012612
##
## High regime:
## const.H phiH.1 phiH.2
## -351.2749796 1.0481121 0.1886645
##
## Threshold:
## -Variable: Z(t) = + (1) X(t)+ (0)X(t-1)+ (0)X(t-2)
## -Value: 1193
## Proportion of points in low regime: 78.95% High regime: 21.05%
##
## Residuals:
## Min 1Q Median 3Q Max
## -234.1334 -76.6856 2.1224 61.6316 277.0180
##
## Fit:
## residuals variance = 9172, AIC = 1107, MAPE = 8.311%
##
## Coefficient(s):
##
## Estimate Std. Error t value Pr(>|t|)
## const.L 22.905069 47.818727 0.4790 0.6328
## phiL.1 1.012612 0.050424 20.0821 < 0.00000000000000022 ***
## const.H -351.274980 251.367333 -1.3975 0.1650
## phiH.1 1.048112 0.246488 4.2522 0.0000433 ***
## phiH.2 0.188664 0.192645 0.9793 0.3295
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Threshold
## Variable: Z(t) = + (1) X(t) + (0) X(t-1)+ (0) X(t-2)
##
## Value: 1193
# plot(modeloas4)
# Residual diagnostics for SETAR candidate 4 (monthly index from Jan 2011).
checkresiduals(ts(modeloas4.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.
# In-sample AIC comparison of the four SETAR candidates
# (lower is better; candidate 1 wins in-sample).
AIC(modeloas1.usd)
## [1] 1036.912
AIC(modeloas2.usd)
## [1] 1103.381
AIC(modeloas3.usd)
## [1] 1038.25
AIC(modeloas4.usd)
## [1] 1106.873
# 7-step-ahead forecasts from each fitted SETAR candidate.
pronsetar1.usd <- predict(modeloas1.usd, n.ahead = 7)
pronsetar2.usd <- predict(modeloas2.usd, n.ahead = 7)
pronsetar3.usd <- predict(modeloas3.usd, n.ahead = 7)
pronsetar4.usd <- predict(modeloas4.usd, n.ahead = 7)
# In-sample fitted values as monthly ts objects (from Jan 2011) for plotting.
fit1.usd <- ts(modeloas1.usd$fitted.values, start = c(2011, 1), frequency = 12)
fit2.usd <- ts(modeloas2.usd$fitted.values, start = c(2011, 1), frequency = 12)
fit3.usd <- ts(modeloas3.usd$fitted.values, start = c(2011, 1), frequency = 12)
fit4.usd <- ts(modeloas4.usd$fitted.values, start = c(2011, 1), frequency = 12)
# Overlay the fitted values of all four candidates on the training series.
autoplot(sactnetusd_train) +
  autolayer(fit1.usd) +
  autolayer(fit2.usd) +
  autolayer(fit3.usd) +
  autolayer(fit4.usd) +
  theme_bw()
# Out-of-sample RMSE of each SETAR candidate on the test window.
# FIX: the Modelo labels now describe the setar() calls actually fitted
# above — labels 1-3 previously listed mL/mH/d combinations that do not
# match modeloas1/2/3.usd.
data.frame(
  Modelo = c(
    "1) mL = 1, mH = 3, d = 1, thDelay = 2",
    "2) mL = 3, mH = 1, d = 2, thDelay = 1",
    "3) mL = 1, mH = 1, d = 1, thDelay = 1",
    "4) m = 3, mL = 1, mH = 2, d = 2, thDelay = 0"
  ),
  RMSE = c(
    Metrics::rmse(sactnetusd_test, pronsetar1.usd),
    Metrics::rmse(sactnetusd_test, pronsetar2.usd),
    Metrics::rmse(sactnetusd_test, pronsetar3.usd),
    Metrics::rmse(sactnetusd_test, pronsetar4.usd)
  )
) %>%
  arrange(RMSE) %>%
  knitr::kable()
| Modelo | RMSE |
|---|---|
| 4) m = 3,mL = 1,mH = 2, d=2 | 269.2250 |
| 1) m = 3,mL = 3,mH = 1, d=1 | 378.0577 |
| 3) m = 3,mL = 3,mH = 2, d=1 | 380.1196 |
| 2) m = 3,mL = 2,mH = 3, d=2 | 389.9989 |
# Test-set actuals against each candidate's 7-step forecast.
autoplot(sactnetusd_test) +
  autolayer(pronsetar1.usd) +
  autolayer(pronsetar2.usd) +
  autolayer(pronsetar3.usd) +
  autolayer(pronsetar4.usd) +
  theme_bw()
# Head-to-head: best TAR forecast vs. best SETAR forecast (candidate 4).
# NOTE(review): `prontar1.usd` is not defined in this chunk — presumably
# produced by an earlier TAR section; confirm upstream.
Metrics::rmse(sactnetusd_test,prontar1.usd)
## [1] 334.8955
Metrics::rmse(sactnetusd_test, pronsetar4.usd)
## [1] 269.225
autoplot(sactnetusd_test)+
autolayer(prontar1.usd)+
autolayer(pronsetar4.usd)+
theme_bw()
# Machine Learning
library(tidymodels)
library(modeltime)
library(modeltime.ensemble)
library(modeltime.resample)
library(timetk)
library(tidyverse)
# Quick static visual check of the colones series before modeling.
colones%>%
plot_time_series(Date,value,.facet_ncol = 3, .interactive = F)
# Extend the series with FORECAST_HORIZON future months and bind them to
# the observed data; the appended future rows carry NA in `value`.
# FIX: TRUE instead of T (T is reassignable).
FORECAST_HORIZON <- 5
full_data_tbl <- colones %>%
  select(Date, value) %>%
  future_frame(
    .date_var = Date,
    .length_out = FORECAST_HORIZON,
    .bind_data = TRUE
  )
is.na(full_data_tbl$value) # sanity check: only the appended future rows should be NA
## [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [169] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [181] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [241] FALSE FALSE FALSE FALSE FALSE FALSE TRUE TRUE TRUE TRUE TRUE
data_prepared_tbl <- full_data_tbl[!is.na(full_data_tbl$value),]  # observed history only
# data_prepared_tbl%>%
# tk_summary_diagnostics()
future_tbl <- full_data_tbl[is.na(full_data_tbl$value),]  # future rows to forecast
# Hold out the final FORECAST_HORIZON observations for testing.
# FIX: the argument is `date_var`, not `data_var` — the misspelling was
# silently absorbed by `...` and the date column was auto-guessed
# (hence the "Using date_var: Date" message below).
splits <- data_prepared_tbl %>%
  arrange(Date) %>%
  time_series_split(
    date_var = Date,
    assess = FORECAST_HORIZON,
    cumulative = TRUE
  )
## Using date_var: Date
splits
## <Analysis/Assess/Total>
## <241/5/246>
# Feature recipe 1: calendar-signature features derived from Date,
# with noisy/irrelevant components dropped, trend terms normalized and
# month one-hot encoded. FIX: TRUE instead of T.
recipe_spec_1 <- recipe(value ~ ., training(splits)) %>%
  step_timeseries_signature(Date) %>%
  # Remove columns that add no signal for a monthly series
  step_rm(matches("(.iso$)|(.xts)|(day)|(hour)|(minute)|(second)|(am.pm)|(week)")) %>%
  step_normalize(Date_index.num, Date_year) %>%
  step_mutate(Date_month = factor(Date_month, ordered = TRUE)) %>%
  step_dummy(all_nominal(), one_hot = TRUE)
recipe_spec_1 %>% prep() %>% juice() %>% glimpse()  # inspect the engineered features
## Rows: 241
## Columns: 30
## $ Date <date> 2001-02-01, 2001-03-01, 2001-04-01, 2001-05-01, 200…
## $ value <dbl> 12637.44, 13569.26, 11895.00, 12882.29, 13393.90, 11…
## $ Date_index.num <dbl> -1.720507, -1.707312, -1.692703, -1.678565, -1.66395…
## $ Date_year <dbl> -1.656391, -1.656391, -1.656391, -1.656391, -1.65639…
## $ Date_half <int> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2…
## $ Date_quarter <int> 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 1, 1, 1, 2, 2, 2, 3…
## $ Date_month_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month.lbl_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month.lbl_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month.lbl_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month.lbl_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month.lbl_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month.lbl_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
# Recipe 2: same features, but Date demoted to an "ID" role so
# non-sequential learners (xgboost, random forest, SVM) do not treat the
# raw date as a predictor.
recipe_spec_2 <- recipe_spec_1%>%
update_role(Date,new_role = "ID")
recipe_spec_2 %>% prep() %>% juice() %>% glimpse()
## Rows: 241
## Columns: 30
## $ Date <date> 2001-02-01, 2001-03-01, 2001-04-01, 2001-05-01, 200…
## $ value <dbl> 12637.44, 13569.26, 11895.00, 12882.29, 13393.90, 11…
## $ Date_index.num <dbl> -1.720507, -1.707312, -1.692703, -1.678565, -1.66395…
## $ Date_year <dbl> -1.656391, -1.656391, -1.656391, -1.656391, -1.65639…
## $ Date_half <int> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2…
## $ Date_quarter <int> 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 1, 1, 1, 2, 2, 2, 3…
## $ Date_month_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month.lbl_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month.lbl_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month.lbl_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month.lbl_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month.lbl_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month.lbl_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
recipe_spec_1 %>% prep() %>% summary()  # Date is still a predictor here
## # A tibble: 30 × 4
## variable type role source
## <chr> <chr> <chr> <chr>
## 1 Date date predictor original
## 2 value numeric outcome original
## 3 Date_index.num numeric predictor derived
## 4 Date_year numeric predictor derived
## 5 Date_half numeric predictor derived
## 6 Date_quarter numeric predictor derived
## 7 Date_month_01 numeric predictor derived
## 8 Date_month_02 numeric predictor derived
## 9 Date_month_03 numeric predictor derived
## 10 Date_month_04 numeric predictor derived
## # … with 20 more rows
recipe_spec_2 %>% prep() %>% summary()  # Date now carries the "ID" role
## # A tibble: 30 × 4
## variable type role source
## <chr> <chr> <chr> <chr>
## 1 Date date ID original
## 2 value numeric outcome original
## 3 Date_index.num numeric predictor derived
## 4 Date_year numeric predictor derived
## 5 Date_half numeric predictor derived
## 6 Date_quarter numeric predictor derived
## 7 Date_month_01 numeric predictor derived
## 8 Date_month_02 numeric predictor derived
## 9 Date_month_03 numeric predictor derived
## 10 Date_month_04 numeric predictor derived
## # … with 20 more rows
# Auto-ARIMA with an XGBoost model fitted to the ARIMA errors.
wflw_fit_autoarima_boost <- workflow() %>%
  add_model(
    arima_boost(min_n = 2, learn_rate = 0.015) %>%
      set_engine(engine = "auto_arima_xgboost")
  ) %>%
  add_recipe(recipe_spec_1) %>%
  fit(training(splits))
## frequency = 12 observations per 1 year
# Plain prophet model on the date-aware recipe.
wflw_fit_prophet <- workflow() %>%
  add_model(prophet_reg() %>% set_engine("prophet")) %>%
  add_recipe(recipe_spec_1) %>%
  fit(training(splits))
## Disabling weekly seasonality. Run prophet with weekly.seasonality=TRUE to override this.
## Disabling daily seasonality. Run prophet with daily.seasonality=TRUE to override this.
# XGBoost, low learning rate.
# FIX: learn_rate was 0.15 while the object name says 0_015; the sibling
# workflows below use 0.1 and 0.3 matching their names, so align this
# one with its name.
wflw_fit_xgboost_0_015 <- workflow() %>%
  add_model(
    boost_tree(learn_rate = 0.015) %>% set_engine("xgboost")
  ) %>%
  add_recipe(recipe_spec_2) %>%
  fit(training(splits))
# XGBoost at learning rates 0.1 and 0.3 (Date used only as an ID).
wflw_fit_xgboost_0_1 <- workflow() %>%
  add_model(boost_tree(learn_rate = 0.1) %>% set_engine("xgboost")) %>%
  add_recipe(recipe_spec_2) %>%
  fit(training(splits))
wflw_fit_xgboost_0_3 <- workflow() %>%
  add_model(boost_tree(learn_rate = 0.3) %>% set_engine("xgboost")) %>%
  add_recipe(recipe_spec_2) %>%
  fit(training(splits))
# Random forests at three ensemble sizes, plus an RBF-kernel SVM,
# all on the ID-role recipe.
wflw_fit_rf_1000 <- workflow() %>%
  add_model(rand_forest(trees = 1000) %>% set_engine("randomForest")) %>%
  add_recipe(recipe_spec_2) %>%
  fit(training(splits))
wflw_fit_rf_500 <- workflow() %>%
  add_model(rand_forest(trees = 500) %>% set_engine("randomForest")) %>%
  add_recipe(recipe_spec_2) %>%
  fit(training(splits))
wflw_fit_rf_200 <- workflow() %>%
  add_model(rand_forest(trees = 200) %>% set_engine("randomForest")) %>%
  add_recipe(recipe_spec_2) %>%
  fit(training(splits))
wflw_fit_svm <- workflow() %>%
  add_model(svm_rbf() %>% set_engine("kernlab")) %>%
  add_recipe(recipe_spec_2) %>%
  fit(training(splits))
# Prophet with XGBoost on the residuals; seasonality is carried by the
# recipe features rather than prophet's own components.
# FIX: removed the fragile trailing comma after the last argument and
# use FALSE instead of F.
wflw_fit_prophet_boost <- workflow() %>%
  add_model(
    prophet_boost(
      seasonality_yearly = FALSE,
      seasonality_weekly = FALSE,
      seasonality_daily = FALSE
    ) %>%
      set_engine("prophet_xgboost")
  ) %>%
  add_recipe(recipe_spec_1) %>%
  fit(training(splits))
# Shortlist of submodels kept for calibration/ensembling; the
# commented-out entries were screened out in earlier iterations.
submodels_tbl <- modeltime_table(
wflw_fit_autoarima_boost,
#wflw_fit_prophet, #1
wflw_fit_prophet_boost, #2
#wflw_fit_xgboost_0_015, #3
#wflw_fit_xgboost_0_1, #4
wflw_fit_xgboost_0_3, #5
#wflw_fit_rf_1000, #6
wflw_fit_rf_500 #, #7
#wflw_fit_rf_200, #8
#wflw_fit_svm #9
)
submodels_tbl
## # Modeltime Table
## # A tibble: 4 × 3
## .model_id .model .model_desc
## <int> <list> <chr>
## 1 1 <workflow> ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRORS
## 2 2 <workflow> PROPHET W/ XGBOOST ERRORS
## 3 3 <workflow> XGBOOST
## 4 4 <workflow> RANDOMFOREST
# Calibrate each submodel on the hold-out split (stores test residuals
# and predictions used for the accuracy metrics below).
submodels_calibrated_tbl <- submodels_tbl %>%
modeltime_calibrate(testing(splits))
submodels_calibrated_tbl
## # Modeltime Table
## # A tibble: 4 × 5
## .model_id .model .model_desc .type .calibration_da…
## <int> <list> <chr> <chr> <list>
## 1 1 <workflow> ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOS… Test <tibble [5 × 4]>
## 2 2 <workflow> PROPHET W/ XGBOOST ERRORS Test <tibble [5 × 4]>
## 3 3 <workflow> XGBOOST Test <tibble [5 × 4]>
## 4 4 <workflow> RANDOMFOREST Test <tibble [5 × 4]>
# Test-set accuracy per submodel, best RMSE first.
submodels_calibrated_tbl%>%
modeltime_accuracy()%>%
arrange(rmse)
## # A tibble: 4 × 9
## .model_id .model_desc .type mae mape mase smape rmse rsq
## <int> <chr> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 1 ARIMA(1,1,2)(2,0,0)[12]… Test 32499. 3.20 0.568 3.21 37479. 0.876
## 2 2 PROPHET W/ XGBOOST ERRO… Test 52131. 5.27 0.911 5.21 64436. 0.963
## 3 3 XGBOOST Test 46464. 4.47 0.812 4.67 66341. 0.521
## 4 4 RANDOMFOREST Test 63139. 6.10 1.10 6.36 76666. 0.837
# Plot each calibrated submodel's test-window forecast against actuals.
# FIX: TRUE instead of T.
submodels_calibrated_tbl %>%
  modeltime_forecast(
    new_data = testing(splits),
    actual_data = data_prepared_tbl,
    keep_data = TRUE
  ) %>%
  plot_modeltime_forecast(.facet_ncol = 2)
# Refit every submodel on the full observed history before forecasting
# the true future window.
submodels_refit_tbl <- submodels_calibrated_tbl %>%
modeltime_refit(data_prepared_tbl)
## frequency = 12 observations per 1 year
# Forecast the FORECAST_HORIZON future months with the refitted models.
# FIX: TRUE instead of T.
submodels_refit_tbl %>%
  modeltime_forecast(
    new_data = future_tbl,
    actual_data = data_prepared_tbl,
    keep_data = TRUE
  ) %>%
  plot_modeltime_forecast(.facet_ncol = 2)
# Reference: https://cran.r-project.org/web/packages/modeltime.resample/vignettes/getting-started.html
# Rolling-origin cross-validation: 36-month initial window, 5-month
# assessment window, advancing 5 months per slice, at most 5 slices.
resamples_tscv <- time_series_cv(
  data = data_prepared_tbl,
  date_var = Date,
  initial = "36 month",
  assess = FORECAST_HORIZON,
  skip = FORECAST_HORIZON,
  slice_limit = 5
)
resamples_tscv
## # Time Series Cross Validation Plan
## # A tibble: 5 × 2
## splits id
## <list> <chr>
## 1 <split [36/5]> Slice1
## 2 <split [36/5]> Slice2
## 3 <split [36/5]> Slice3
## 4 <split [36/5]> Slice4
## 5 <split [36/5]> Slice5
# Visualize the CV plan (which months fall in each train/assess slice).
# FIX: TRUE instead of T.
resamples_tscv %>%
  tk_time_series_cv_plan() %>%
  plot_time_series_cv_plan(Date,
                           value,
                           .facet_ncol = 2,
                           .interactive = TRUE)
# Generate resample predictions ----
# Fit every submodel across all CV slices and store the per-slice
# predictions for resample accuracy evaluation.
resamples_fitted <- submodels_tbl %>%
modeltime_fit_resamples(
resamples = resamples_tscv,
control = control_resamples(verbose = FALSE)
)
resamples_fitted
## # Modeltime Table
## # A tibble: 4 × 4
## .model_id .model .model_desc .resample_resul…
## <int> <list> <chr> <list>
## 1 1 <workflow> ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRO… <rsmp[+]>
## 2 2 <workflow> PROPHET W/ XGBOOST ERRORS <rsmp[+]>
## 3 3 <workflow> XGBOOST <rsmp[+]>
## 4 4 <workflow> RANDOMFOREST <rsmp[+]>
# Evaluate the results ----
# Visualize the spread of accuracy metrics across the CV slices.
# FIX: TRUE instead of T.
resamples_fitted %>%
  plot_modeltime_resamples(
    .point_size = 3,
    .point_alpha = 0.8,
    .interactive = TRUE
  )
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
# Mean accuracy across slices, rendered as an interactive table.
# FIX: TRUE instead of T.
resamples_fitted %>%
  modeltime_resample_accuracy(summary_fns = mean) %>%
  table_modeltime_accuracy(.interactive = TRUE)
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
# Ensemble 1: simple mean of all submodel forecasts.
# (To exclude a submodel, filter it out of submodels_tbl first,
# e.g. filter(!.model_id %in% c(1)).)
ensemble_fit_mean <- ensemble_average(submodels_tbl, type = "mean")
# Ensemble 2: glmnet meta-learner stacked on the resample predictions,
# tuning penalty and mixture over a 2-point grid with verbose tuning log.
ensemble_fit_lm <- ensemble_model_spec(
  resamples_fitted,
  model_spec = set_engine(
    linear_reg(penalty = tune(), mixture = tune()),
    "glmnet"
  ),
  grid    = 2,
  control = control_grid(verbose = TRUE)
)
## ── Tuning Model Specification ───────────────────────────────────
## ℹ Performing 5-Fold Cross Validation.
## i Fold1: preprocessor 1/1
## ✓ Fold1: preprocessor 1/1
## i Fold1: preprocessor 1/1, model 1/2
## ✓ Fold1: preprocessor 1/1, model 1/2
## i Fold1: preprocessor 1/1, model 1/2 (predictions)
## i Fold1: preprocessor 1/1, model 2/2
## ✓ Fold1: preprocessor 1/1, model 2/2
## i Fold1: preprocessor 1/1, model 2/2 (predictions)
## i Fold2: preprocessor 1/1
## ✓ Fold2: preprocessor 1/1
## i Fold2: preprocessor 1/1, model 1/2
## ✓ Fold2: preprocessor 1/1, model 1/2
## i Fold2: preprocessor 1/1, model 1/2 (predictions)
## i Fold2: preprocessor 1/1, model 2/2
## ✓ Fold2: preprocessor 1/1, model 2/2
## i Fold2: preprocessor 1/1, model 2/2 (predictions)
## i Fold3: preprocessor 1/1
## ✓ Fold3: preprocessor 1/1
## i Fold3: preprocessor 1/1, model 1/2
## ✓ Fold3: preprocessor 1/1, model 1/2
## i Fold3: preprocessor 1/1, model 1/2 (predictions)
## i Fold3: preprocessor 1/1, model 2/2
## ✓ Fold3: preprocessor 1/1, model 2/2
## i Fold3: preprocessor 1/1, model 2/2 (predictions)
## i Fold4: preprocessor 1/1
## ✓ Fold4: preprocessor 1/1
## i Fold4: preprocessor 1/1, model 1/2
## ✓ Fold4: preprocessor 1/1, model 1/2
## i Fold4: preprocessor 1/1, model 1/2 (predictions)
## i Fold4: preprocessor 1/1, model 2/2
## ✓ Fold4: preprocessor 1/1, model 2/2
## i Fold4: preprocessor 1/1, model 2/2 (predictions)
## i Fold5: preprocessor 1/1
## ✓ Fold5: preprocessor 1/1
## i Fold5: preprocessor 1/1, model 1/2
## ✓ Fold5: preprocessor 1/1, model 1/2
## i Fold5: preprocessor 1/1, model 1/2 (predictions)
## i Fold5: preprocessor 1/1, model 2/2
## ✓ Fold5: preprocessor 1/1, model 2/2
## i Fold5: preprocessor 1/1, model 2/2 (predictions)
## ✓ Finished tuning Model Specification.
## ℹ Model Parameters:
## # A tibble: 1 × 8
## penalty mixture .metric .estimator mean n std_err .config
## <dbl> <dbl> <chr> <chr> <dbl> <int> <dbl> <chr>
## 1 0.000109 0.0673 rmse standard 104459. 5 10403. Preprocessor1_Model1
## ℹ Prediction Error Comparison:
## # A tibble: 5 × 3
## .model_id rmse .model_desc
## <chr> <dbl> <chr>
## 1 1 128924. ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRORS
## 2 2 116572. PROPHET W/ XGBOOST ERRORS
## 3 3 121334. XGBOOST
## 4 4 127417. RANDOMFOREST
## 5 ensemble 87205. ENSEMBLE (MODEL SPEC)
##
## ── Final Model ──────────────────────────────────────────────────
## ℹ Model Workflow:
## ══ Workflow [trained] ══════════════════════════════════════════════════════════
## Preprocessor: Recipe
## Model: linear_reg()
##
## ── Preprocessor ────────────────────────────────────────────────────────────────
## 0 Recipe Steps
##
## ── Model ───────────────────────────────────────────────────────────────────────
##
## Call: glmnet::glmnet(x = maybe_matrix(x), y = y, family = "gaussian", alpha = ~0.06726714601391)
##
## Df %Dev Lambda
## 1 0 0.00 1487000
## 2 3 1.91 1354000
## 3 4 4.53 1234000
## 4 4 7.32 1125000
## 5 4 10.11 1025000
## 6 4 12.88 933600
## 7 4 15.63 850700
## 8 4 18.32 775100
## 9 4 20.94 706200
## 10 4 23.49 643500
## 11 4 25.95 586300
## 12 4 28.30 534200
## 13 4 30.54 486800
## 14 4 32.65 443500
## 15 4 34.64 404100
## 16 4 36.50 368200
## 17 4 38.23 335500
## 18 4 39.82 305700
## 19 4 41.28 278600
## 20 4 42.62 253800
## 21 4 43.83 231300
## 22 4 44.93 210700
## 23 4 45.93 192000
## 24 4 46.82 174900
## 25 4 47.62 159400
## 26 4 48.33 145200
## 27 4 48.97 132300
## 28 4 49.53 120600
## 29 4 50.04 109900
## 30 4 50.50 100100
## 31 4 50.90 91210
## 32 4 51.27 83110
## 33 4 51.60 75730
## 34 4 51.90 69000
## 35 4 52.17 62870
## 36 4 52.42 57280
## 37 4 52.66 52200
## 38 4 52.88 47560
## 39 4 53.09 43330
## 40 4 53.29 39480
## 41 4 53.48 35980
## 42 4 53.67 32780
## 43 4 53.85 29870
## 44 3 53.96 27210
## 45 3 54.04 24800
## 46 3 54.11 22590
##
## ...
## and 54 more lines.
##
## 6.759 sec elapsed
# Ensemble 3: XGBoost meta-learner stacked on the resample predictions,
# tuning mtry, trees and learn_rate over the default grid.
ensemble_fit_xg <- ensemble_model_spec(
  resamples_fitted,
  model_spec = set_engine(
    boost_tree(mtry = tune(), trees = tune(), learn_rate = tune()),
    "xgboost"
  ),
  control = control_grid(verbose = TRUE)
)
## ── Tuning Model Specification ───────────────────────────────────
## ℹ Performing 5-Fold Cross Validation.
## i Creating pre-processing data to finalize unknown parameter: mtry
## i Fold1: preprocessor 1/1
## ✓ Fold1: preprocessor 1/1
## i Fold1: preprocessor 1/1, model 1/6
## ✓ Fold1: preprocessor 1/1, model 1/6
## i Fold1: preprocessor 1/1, model 1/6 (predictions)
## i Fold1: preprocessor 1/1, model 2/6
## ✓ Fold1: preprocessor 1/1, model 2/6
## i Fold1: preprocessor 1/1, model 2/6 (predictions)
## i Fold1: preprocessor 1/1, model 3/6
## ✓ Fold1: preprocessor 1/1, model 3/6
## i Fold1: preprocessor 1/1, model 3/6 (predictions)
## i Fold1: preprocessor 1/1, model 4/6
## ✓ Fold1: preprocessor 1/1, model 4/6
## i Fold1: preprocessor 1/1, model 4/6 (predictions)
## i Fold1: preprocessor 1/1, model 5/6
## ✓ Fold1: preprocessor 1/1, model 5/6
## i Fold1: preprocessor 1/1, model 5/6 (predictions)
## i Fold1: preprocessor 1/1, model 6/6
## ✓ Fold1: preprocessor 1/1, model 6/6
## i Fold1: preprocessor 1/1, model 6/6 (predictions)
## i Fold2: preprocessor 1/1
## ✓ Fold2: preprocessor 1/1
## i Fold2: preprocessor 1/1, model 1/6
## ✓ Fold2: preprocessor 1/1, model 1/6
## i Fold2: preprocessor 1/1, model 1/6 (predictions)
## i Fold2: preprocessor 1/1, model 2/6
## ✓ Fold2: preprocessor 1/1, model 2/6
## i Fold2: preprocessor 1/1, model 2/6 (predictions)
## i Fold2: preprocessor 1/1, model 3/6
## ✓ Fold2: preprocessor 1/1, model 3/6
## i Fold2: preprocessor 1/1, model 3/6 (predictions)
## i Fold2: preprocessor 1/1, model 4/6
## ✓ Fold2: preprocessor 1/1, model 4/6
## i Fold2: preprocessor 1/1, model 4/6 (predictions)
## i Fold2: preprocessor 1/1, model 5/6
## ✓ Fold2: preprocessor 1/1, model 5/6
## i Fold2: preprocessor 1/1, model 5/6 (predictions)
## i Fold2: preprocessor 1/1, model 6/6
## ✓ Fold2: preprocessor 1/1, model 6/6
## i Fold2: preprocessor 1/1, model 6/6 (predictions)
## i Fold3: preprocessor 1/1
## ✓ Fold3: preprocessor 1/1
## i Fold3: preprocessor 1/1, model 1/6
## ✓ Fold3: preprocessor 1/1, model 1/6
## i Fold3: preprocessor 1/1, model 1/6 (predictions)
## i Fold3: preprocessor 1/1, model 2/6
## ✓ Fold3: preprocessor 1/1, model 2/6
## i Fold3: preprocessor 1/1, model 2/6 (predictions)
## i Fold3: preprocessor 1/1, model 3/6
## ✓ Fold3: preprocessor 1/1, model 3/6
## i Fold3: preprocessor 1/1, model 3/6 (predictions)
## i Fold3: preprocessor 1/1, model 4/6
## ✓ Fold3: preprocessor 1/1, model 4/6
## i Fold3: preprocessor 1/1, model 4/6 (predictions)
## i Fold3: preprocessor 1/1, model 5/6
## ✓ Fold3: preprocessor 1/1, model 5/6
## i Fold3: preprocessor 1/1, model 5/6 (predictions)
## i Fold3: preprocessor 1/1, model 6/6
## ✓ Fold3: preprocessor 1/1, model 6/6
## i Fold3: preprocessor 1/1, model 6/6 (predictions)
## i Fold4: preprocessor 1/1
## ✓ Fold4: preprocessor 1/1
## i Fold4: preprocessor 1/1, model 1/6
## ✓ Fold4: preprocessor 1/1, model 1/6
## i Fold4: preprocessor 1/1, model 1/6 (predictions)
## i Fold4: preprocessor 1/1, model 2/6
## ✓ Fold4: preprocessor 1/1, model 2/6
## i Fold4: preprocessor 1/1, model 2/6 (predictions)
## i Fold4: preprocessor 1/1, model 3/6
## ✓ Fold4: preprocessor 1/1, model 3/6
## i Fold4: preprocessor 1/1, model 3/6 (predictions)
## i Fold4: preprocessor 1/1, model 4/6
## ✓ Fold4: preprocessor 1/1, model 4/6
## i Fold4: preprocessor 1/1, model 4/6 (predictions)
## i Fold4: preprocessor 1/1, model 5/6
## ✓ Fold4: preprocessor 1/1, model 5/6
## i Fold4: preprocessor 1/1, model 5/6 (predictions)
## i Fold4: preprocessor 1/1, model 6/6
## ✓ Fold4: preprocessor 1/1, model 6/6
## i Fold4: preprocessor 1/1, model 6/6 (predictions)
## i Fold5: preprocessor 1/1
## ✓ Fold5: preprocessor 1/1
## i Fold5: preprocessor 1/1, model 1/6
## ✓ Fold5: preprocessor 1/1, model 1/6
## i Fold5: preprocessor 1/1, model 1/6 (predictions)
## i Fold5: preprocessor 1/1, model 2/6
## ✓ Fold5: preprocessor 1/1, model 2/6
## i Fold5: preprocessor 1/1, model 2/6 (predictions)
## i Fold5: preprocessor 1/1, model 3/6
## ✓ Fold5: preprocessor 1/1, model 3/6
## i Fold5: preprocessor 1/1, model 3/6 (predictions)
## i Fold5: preprocessor 1/1, model 4/6
## ✓ Fold5: preprocessor 1/1, model 4/6
## i Fold5: preprocessor 1/1, model 4/6 (predictions)
## i Fold5: preprocessor 1/1, model 5/6
## ✓ Fold5: preprocessor 1/1, model 5/6
## i Fold5: preprocessor 1/1, model 5/6 (predictions)
## i Fold5: preprocessor 1/1, model 6/6
## ✓ Fold5: preprocessor 1/1, model 6/6
## i Fold5: preprocessor 1/1, model 6/6 (predictions)
## ✓ Finished tuning Model Specification.
## ℹ Model Parameters:
## # A tibble: 1 × 9
## mtry trees learn_rate .metric .estimator mean n std_err .config
## <int> <int> <dbl> <chr> <chr> <dbl> <int> <dbl> <chr>
## 1 1 917 0.0162 rmse standard 84631. 5 10806. Preprocessor1_…
## ℹ Prediction Error Comparison:
## # A tibble: 5 × 3
## .model_id rmse .model_desc
## <chr> <dbl> <chr>
## 1 1 128924. ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRORS
## 2 2 116572. PROPHET W/ XGBOOST ERRORS
## 3 3 121334. XGBOOST
## 4 4 127417. RANDOMFOREST
## 5 ensemble 438. ENSEMBLE (MODEL SPEC)
##
## ── Final Model ──────────────────────────────────────────────────
## ℹ Model Workflow:
## ══ Workflow [trained] ══════════════════════════════════════════════════════════
## Preprocessor: Recipe
## Model: boost_tree()
##
## ── Preprocessor ────────────────────────────────────────────────────────────────
## 0 Recipe Steps
##
## ── Model ───────────────────────────────────────────────────────────────────────
## ##### xgb.Booster
## raw: 1.4 Mb
## call:
## xgboost::xgb.train(params = list(eta = 0.0161558189357943, max_depth = 6,
## gamma = 0, colsample_bytree = 1, colsample_bynode = 0.25,
## min_child_weight = 1, subsample = 1, objective = "reg:squarederror"),
## data = x$data, nrounds = 917L, watchlist = x$watchlist, verbose = 0,
## nthread = 1)
## params (as set within xgb.train):
## eta = "0.0161558189357943", max_depth = "6", gamma = "0", colsample_bytree = "1", colsample_bynode = "0.25", min_child_weight = "1", subsample = "1", objective = "reg:squarederror", nthread = "1", validate_parameters = "TRUE"
## xgb.attributes:
## niter
## callbacks:
## cb.evaluation.log()
## # of features: 4
## niter: 917
## nfeatures : 4
## evaluation_log:
## iter training_rmse
## 1 919759.9375
## 2 905785.1875
## ---
## 916 441.3931
## 917 438.1454
##
## 23.973 sec elapsed
# Collect the three ensembles, then score them together with the
# submodels on the test split, best RMSE first.
ensemble_tbl <- modeltime_table(
  ensemble_fit_mean,
  ensemble_fit_lm,
  ensemble_fit_xg
)
knitr::kable(
  arrange(
    modeltime_accuracy(
      combine_modeltime_tables(ensemble_tbl, submodels_tbl),
      testing(splits)
    ),
    rmse, mae, mape, mase
  )
)
| .model_id | .model_desc | .type | mae | mape | mase | smape | rmse | rsq |
|---|---|---|---|---|---|---|---|---|
| 4 | ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRORS | Test | 32498.55 | 3.200357 | 0.5677113 | 3.206721 | 37479.12 | 0.8764449 |
| 1 | ENSEMBLE (MEAN): 4 MODELS | Test | 31041.51 | 2.889719 | 0.5422586 | 2.991124 | 50842.19 | 0.8127373 |
| 3 | ENSEMBLE (XGBOOST STACK): 4 MODELS | Test | 59729.73 | 6.062162 | 1.0434081 | 6.070221 | 63047.91 | 0.4063139 |
| 5 | PROPHET W/ XGBOOST ERRORS | Test | 52131.10 | 5.268918 | 0.9106689 | 5.210271 | 64435.97 | 0.9630310 |
| 2 | ENSEMBLE (GLMNET STACK): 4 MODELS | Test | 54520.36 | 5.302068 | 0.9524065 | 5.375310 | 66232.61 | 0.2497914 |
| 6 | XGBOOST | Test | 46463.83 | 4.473893 | 0.8116684 | 4.667059 | 66341.33 | 0.5212406 |
| 7 | RANDOMFOREST | Test | 63139.02 | 6.098634 | 1.1029644 | 6.364470 | 76666.44 | 0.8367149 |
# Calibrate the ensembles on the test split and plot their test-window
# forecasts with 95% confidence intervals.
ensemble_tbl %>%
  modeltime_calibrate(testing(splits)) %>%
  modeltime_forecast(
    new_data      = testing(splits),
    actual_data   = data_prepared_tbl,
    keep_data     = TRUE,  # TRUE, not T
    # NOTE(review): no id column was present at calibration, so a global
    # confidence interval is returned (see the warning below) — confirm
    # whether per-id intervals were intended.
    conf_by_id    = TRUE,
    conf_interval = 0.95
  ) %>%
  plot_modeltime_forecast(
    .facet_ncol = 2
  )
## Warning: The 'id' column in calibration data was not detected. Global Confidence
## Interval is being returned.
# Same test-window comparison, ensembles and submodels plotted together.
ensemble_tbl %>%
  combine_modeltime_tables(submodels_tbl) %>%
  modeltime_calibrate(testing(splits)) %>%
  modeltime_forecast(
    new_data    = testing(splits),
    actual_data = data_prepared_tbl,
    keep_data   = TRUE  # TRUE, not T
  ) %>%
  plot_modeltime_forecast(
    .facet_ncol = 2
  )
# Combined table of ensembles + submodels, kept for reuse.
# NOTE(review): only `ensemble_tbl` is refit below, so
# `ensemble_tbl_all_model` is built but never used here — if the intent
# was to refit every model, refit `ensemble_tbl_all_model` instead; confirm.
ensemble_tbl_all_model<-ensemble_tbl%>%
combine_modeltime_tables(submodels_tbl)
# Refit the ensembles on the full history. The stacked meta-learners are
# NOT refit unless `resamples` is supplied (see the warnings below).
ensemble_refit_tbl <- ensemble_tbl%>%
modeltime_refit(data_prepared_tbl)
## frequency = 12 observations per 1 year
## Warning in mdl_time_refit.mdl_time_ensemble_model_spec(...): 'resamples' not
## provided during refitting. Submodels will be refit, but the meta-learner will
## *not* be refit. You can provide 'resamples' via `modeltime_refit(object, data,
## resamples, control)`. Proceeding by refitting the submodels only.
## frequency = 12 observations per 1 year
## Warning in mdl_time_refit.mdl_time_ensemble_model_spec(...): 'resamples' not
## provided during refitting. Submodels will be refit, but the meta-learner will
## *not* be refit. You can provide 'resamples' via `modeltime_refit(object, data,
## resamples, control)`. Proceeding by refitting the submodels only.
## frequency = 12 observations per 1 year
# Forecast the refit ensembles over the future horizon.
ensemble_refit_tbl %>%
  modeltime_forecast(
    new_data    = future_tbl,
    actual_data = data_prepared_tbl,
    keep_data   = TRUE  # TRUE, not T
  ) %>%
  plot_modeltime_forecast(
    .facet_ncol = 2
  )
## Warning: Expecting the following names to be in the data frame: .conf_hi, .conf_lo.
## Proceeding with '.conf_interval_show = FALSE' to visualize the forecast without confidence intervals.
## Alternatively, try using `modeltime_calibrate()` before forecasting to add confidence intervals.
# Second dataset: the `dolares` monthly series.
dolares %>%
  plot_time_series(Date, value, .facet_ncol = 3, .interactive = FALSE)  # FALSE, not F
FORECAST_HORIZON <- 5
# Extend the series FORECAST_HORIZON months into the future; the future
# rows come back with NA `value` and are bound to the historical rows.
full_data_tbl <- dolares %>%
  select(Date, value) %>%
  future_frame(
    .date_var   = Date,
    .length_out = FORECAST_HORIZON,
    .bind_data  = TRUE  # TRUE, not T
  )
# Split into historical rows (observed value) and the NA future rows
# that will be forecast.
data_prepared_tbl <- full_data_tbl %>% filter(!is.na(value))
# data_prepared_tbl %>% tk_summary_diagnostics()
future_tbl <- full_data_tbl %>% filter(is.na(value))
# Train/test split: last FORECAST_HORIZON observations held out,
# cumulative (expanding) training window.
splits <- data_prepared_tbl %>%
  arrange(Date) %>%
  time_series_split(
    # Was misspelled `data_var`; that argument was silently ignored and
    # the date column auto-detected ("Using date_var: Date" message).
    date_var   = Date,
    assess     = FORECAST_HORIZON,
    cumulative = TRUE  # TRUE, not T
  )
splits
## <Analysis/Assess/Total>
## <241/5/246>
# Feature recipe: calendar signature from Date, drop granularities that
# add nothing for monthly data, normalize trend terms, ordered month
# factor, one-hot dummies for nominal columns.
recipe_spec_1 <- recipe(value ~ ., training(splits)) %>%
  step_timeseries_signature(Date) %>%
  # Remove columns/attributes that contribute nothing at monthly frequency.
  # Dots escaped so ".iso"/".xts" match literal suffixes, not any character.
  step_rm(matches("(\\.iso$)|(\\.xts)|(day)|(hour)|(minute)|(second)|(am\\.pm)|(week)")) %>%
  step_normalize(Date_index.num, Date_year) %>%
  step_mutate(Date_month = factor(Date_month, ordered = TRUE)) %>%  # TRUE, not T
  step_dummy(all_nominal(), one_hot = TRUE)
recipe_spec_1 %>% prep() %>% juice() %>% glimpse()
## Rows: 241
## Columns: 30
## $ Date <date> 2001-02-01, 2001-03-01, 2001-04-01, 2001-05-01, 200…
## $ value <dbl> 21.8817, 24.1889, 24.6323, 30.7223, 30.6749, 31.9302…
## $ Date_index.num <dbl> -1.720507, -1.707312, -1.692703, -1.678565, -1.66395…
## $ Date_year <dbl> -1.656391, -1.656391, -1.656391, -1.656391, -1.65639…
## $ Date_half <int> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2…
## $ Date_quarter <int> 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 1, 1, 1, 2, 2, 2, 3…
## $ Date_month_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month.lbl_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month.lbl_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month.lbl_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month.lbl_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month.lbl_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month.lbl_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
# Variant of recipe 1 where Date is demoted to an ID column (not a
# predictor), for engines that cannot consume a raw date column.
recipe_spec_2 <- update_role(recipe_spec_1, Date, new_role = "ID")
recipe_spec_2 %>% prep() %>% juice() %>% glimpse()
## Rows: 241
## Columns: 30
## $ Date <date> 2001-02-01, 2001-03-01, 2001-04-01, 2001-05-01, 200…
## $ value <dbl> 21.8817, 24.1889, 24.6323, 30.7223, 30.6749, 31.9302…
## $ Date_index.num <dbl> -1.720507, -1.707312, -1.692703, -1.678565, -1.66395…
## $ Date_year <dbl> -1.656391, -1.656391, -1.656391, -1.656391, -1.65639…
## $ Date_half <int> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2…
## $ Date_quarter <int> 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 1, 1, 1, 2, 2, 2, 3…
## $ Date_month_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month.lbl_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month.lbl_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month.lbl_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month.lbl_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month.lbl_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month.lbl_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
# Column roles for recipe 1: Date remains a predictor.
summary(prep(recipe_spec_1))
## # A tibble: 30 × 4
## variable type role source
## <chr> <chr> <chr> <chr>
## 1 Date date predictor original
## 2 value numeric outcome original
## 3 Date_index.num numeric predictor derived
## 4 Date_year numeric predictor derived
## 5 Date_half numeric predictor derived
## 6 Date_quarter numeric predictor derived
## 7 Date_month_01 numeric predictor derived
## 8 Date_month_02 numeric predictor derived
## 9 Date_month_03 numeric predictor derived
## 10 Date_month_04 numeric predictor derived
## # … with 20 more rows
# Column roles for recipe 2: Date is now an ID.
summary(prep(recipe_spec_2))
## # A tibble: 30 × 4
## variable type role source
## <chr> <chr> <chr> <chr>
## 1 Date date ID original
## 2 value numeric outcome original
## 3 Date_index.num numeric predictor derived
## 4 Date_year numeric predictor derived
## 5 Date_half numeric predictor derived
## 6 Date_quarter numeric predictor derived
## 7 Date_month_01 numeric predictor derived
## 8 Date_month_02 numeric predictor derived
## 9 Date_month_03 numeric predictor derived
## 10 Date_month_04 numeric predictor derived
## # … with 20 more rows
# Auto-ARIMA with XGBoost modeling the residual errors.
wflw_fit_autoarima_boost <- workflow() %>%
  add_recipe(recipe_spec_1) %>%
  add_model(
    set_engine(
      arima_boost(min_n = 2, learn_rate = 0.015),
      engine = "auto_arima_xgboost"
    )
  ) %>%
  fit(training(splits))
## frequency = 12 observations per 1 year
# Plain Prophet model.
wflw_fit_prophet <- workflow() %>%
  add_recipe(recipe_spec_1) %>%
  add_model(set_engine(prophet_reg(), "prophet")) %>%
  fit(training(splits))
## Disabling weekly seasonality. Run prophet with weekly.seasonality=TRUE to override this.
## Disabling daily seasonality. Run prophet with daily.seasonality=TRUE to override this.
# XGBoost with learn_rate 0.015. The original set learn_rate = 0.15,
# contradicting the variable name; fixed to match the name. (This model
# is currently excluded from submodels_tbl, so downstream results are
# unaffected.)
wflw_fit_xgboost_0_015 <- workflow() %>%
  add_model(
    boost_tree(learn_rate = 0.015) %>% set_engine("xgboost")
  ) %>%
  add_recipe(recipe_spec_2) %>%
  fit(training(splits))
# XGBoost, learn_rate 0.1.
wflw_fit_xgboost_0_1 <- workflow() %>%
  add_recipe(recipe_spec_2) %>%
  add_model(set_engine(boost_tree(learn_rate = 0.1), "xgboost")) %>%
  fit(training(splits))
# XGBoost, learn_rate 0.3.
wflw_fit_xgboost_0_3 <- workflow() %>%
  add_recipe(recipe_spec_2) %>%
  add_model(set_engine(boost_tree(learn_rate = 0.3), "xgboost")) %>%
  fit(training(splits))
# Random forest, 1000 trees.
wflw_fit_rf_1000 <- workflow() %>%
  add_recipe(recipe_spec_2) %>%
  add_model(set_engine(rand_forest(trees = 1000), "randomForest")) %>%
  fit(training(splits))
# Random forest, 500 trees.
wflw_fit_rf_500 <- workflow() %>%
  add_recipe(recipe_spec_2) %>%
  add_model(set_engine(rand_forest(trees = 500), "randomForest")) %>%
  fit(training(splits))
# Random forest, 200 trees.
wflw_fit_rf_200 <- workflow() %>%
  add_recipe(recipe_spec_2) %>%
  add_model(set_engine(rand_forest(trees = 200), "randomForest")) %>%
  fit(training(splits))
# RBF-kernel support vector machine.
wflw_fit_svm <- workflow() %>%
  add_recipe(recipe_spec_2) %>%
  add_model(set_engine(svm_rbf(), "kernlab")) %>%
  fit(training(splits))
# Prophet with XGBoost residual errors; all Prophet seasonalities are
# disabled so the XGBoost component models the calendar features instead.
wflw_fit_prophet_boost <- workflow() %>%
  add_model(
    prophet_boost(
      seasonality_yearly = FALSE,  # FALSE, not F (F is reassignable)
      seasonality_weekly = FALSE,
      seasonality_daily  = FALSE   # trailing comma after this arg removed
    ) %>%
      set_engine("prophet_xgboost")
  ) %>%
  add_recipe(recipe_spec_1) %>%
  fit(training(splits))
# Submodels carried forward to calibration/ensembling. Alternatives kept
# in the workspace but excluded here: wflw_fit_prophet,
# wflw_fit_xgboost_0_015, wflw_fit_xgboost_0_1, wflw_fit_rf_1000,
# wflw_fit_rf_200, wflw_fit_svm.
submodels_tbl <- modeltime_table(
  wflw_fit_autoarima_boost,
  wflw_fit_prophet_boost,
  wflw_fit_xgboost_0_3,
  wflw_fit_rf_500
)
submodels_tbl
## # Modeltime Table
## # A tibble: 4 × 3
## .model_id .model .model_desc
## <int> <list> <chr>
## 1 1 <workflow> ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ XGBOOST ERRORS
## 2 2 <workflow> PROPHET W/ XGBOOST ERRORS
## 3 3 <workflow> XGBOOST
## 4 4 <workflow> RANDOMFOREST
# Calibrate the submodels on the held-out test split.
submodels_calibrated_tbl <- modeltime_calibrate(submodels_tbl, testing(splits))
submodels_calibrated_tbl
## # Modeltime Table
## # A tibble: 4 × 5
## .model_id .model .model_desc .type .calibration_da…
## <int> <list> <chr> <chr> <list>
## 1 1 <workflow> ARIMA(0,1,0)(1,0,0)[12] WITH DRIF… Test <tibble [5 × 4]>
## 2 2 <workflow> PROPHET W/ XGBOOST ERRORS Test <tibble [5 × 4]>
## 3 3 <workflow> XGBOOST Test <tibble [5 × 4]>
## 4 4 <workflow> RANDOMFOREST Test <tibble [5 × 4]>
# Test-split accuracy, best RMSE first.
arrange(modeltime_accuracy(submodels_calibrated_tbl), rmse)
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
## # A tibble: 4 × 9
## .model_id .model_desc .type mae mape mase smape rmse rsq
## <int> <chr> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 1 ARIMA(0,1,0)(1,0,0)[12]… Test 66.6 3.76 0.774 3.81 75.8 0.792
## 2 2 PROPHET W/ XGBOOST ERRO… Test 118. 6.53 1.37 6.83 141. 0.0919
## 3 3 XGBOOST Test 168. 9.34 1.95 9.93 192. NA
## 4 4 RANDOMFOREST Test 309. 17.5 3.59 19.2 321. 0.347
# Test-window forecasts plotted against the actuals.
submodels_calibrated_tbl %>%
  modeltime_forecast(
    new_data    = testing(splits),
    actual_data = data_prepared_tbl,
    keep_data   = TRUE  # TRUE, not T
  ) %>%
  plot_modeltime_forecast(
    .facet_ncol = 2
  )
# Refit on the full history and forecast the future horizon.
submodels_refit_tbl <- submodels_calibrated_tbl %>%
  modeltime_refit(data_prepared_tbl)
## frequency = 12 observations per 1 year
submodels_refit_tbl %>%
  modeltime_forecast(
    new_data    = future_tbl,
    actual_data = data_prepared_tbl,
    keep_data   = TRUE  # TRUE, not T
  ) %>%
  plot_modeltime_forecast(
    .facet_ncol = 2
  )
# Reference: https://cran.r-project.org/web/packages/modeltime.resample/vignettes/getting-started.html
# 5-slice time-series CV plan for the dolares series: 36-month training
# window, FORECAST_HORIZON-step assessment, slices advanced by the horizon.
resamples_tscv <- time_series_cv(
  data_prepared_tbl,
  date_var    = Date,
  initial     = "36 month",
  assess      = FORECAST_HORIZON,
  skip        = FORECAST_HORIZON,
  slice_limit = 5
)
resamples_tscv
## # Time Series Cross Validation Plan
## # A tibble: 5 × 2
## splits id
## <list> <chr>
## 1 <split [36/5]> Slice1
## 2 <split [36/5]> Slice2
## 3 <split [36/5]> Slice3
## 4 <split [36/5]> Slice4
## 5 <split [36/5]> Slice5
# Visualize the train/assess windows of each CV slice.
resamples_tscv %>%
  tk_time_series_cv_plan() %>%
  plot_time_series_cv_plan(
    Date,
    value,
    .facet_ncol  = 2,
    .interactive = TRUE  # TRUE, not T
  )
# Generate resample predictions ----
# Fit every submodel across the CV slices (quiet run, no progress output).
resamples_fitted <- modeltime_fit_resamples(
  submodels_tbl,
  resamples = resamples_tscv,
  control   = control_resamples(verbose = FALSE)
)
resamples_fitted
## # Modeltime Table
## # A tibble: 4 × 4
## .model_id .model .model_desc .resample_resul…
## <int> <list> <chr> <list>
## 1 1 <workflow> ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ X… <rsmp[+]>
## 2 2 <workflow> PROPHET W/ XGBOOST ERRORS <rsmp[+]>
## 3 3 <workflow> XGBOOST <rsmp[+]>
## 4 4 <workflow> RANDOMFOREST <rsmp[+]>
# Evaluate the resample results ----
# Per-slice accuracy of each submodel, as an interactive plot.
resamples_fitted %>%
  plot_modeltime_resamples(
    .point_size  = 3,
    .point_alpha = 0.8,
    .interactive = TRUE  # TRUE, not T
  )
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
# Mean accuracy across slices, as an interactive table.
resamples_fitted %>%
  modeltime_resample_accuracy(summary_fns = mean) %>%
  table_modeltime_accuracy(.interactive = TRUE)  # TRUE, not T
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
# Ensemble 1: simple mean of all submodel forecasts.
# (To exclude a submodel, filter it out of submodels_tbl first,
# e.g. filter(!.model_id %in% c(1)).)
ensemble_fit_mean <- ensemble_average(submodels_tbl, type = "mean")
# Ensemble 2: glmnet meta-learner stacked on the resample predictions,
# tuning penalty and mixture over a 2-point grid with verbose tuning log.
ensemble_fit_lm <- ensemble_model_spec(
  resamples_fitted,
  model_spec = set_engine(
    linear_reg(penalty = tune(), mixture = tune()),
    "glmnet"
  ),
  grid    = 2,
  control = control_grid(verbose = TRUE)
)
## ── Tuning Model Specification ───────────────────────────────────
## ℹ Performing 5-Fold Cross Validation.
## i Fold1: preprocessor 1/1
## ✓ Fold1: preprocessor 1/1
## i Fold1: preprocessor 1/1, model 1/2
## ✓ Fold1: preprocessor 1/1, model 1/2
## i Fold1: preprocessor 1/1, model 1/2 (predictions)
## i Fold1: preprocessor 1/1, model 2/2
## ✓ Fold1: preprocessor 1/1, model 2/2
## i Fold1: preprocessor 1/1, model 2/2 (predictions)
## i Fold2: preprocessor 1/1
## ✓ Fold2: preprocessor 1/1
## i Fold2: preprocessor 1/1, model 1/2
## ✓ Fold2: preprocessor 1/1, model 1/2
## i Fold2: preprocessor 1/1, model 1/2 (predictions)
## i Fold2: preprocessor 1/1, model 2/2
## ✓ Fold2: preprocessor 1/1, model 2/2
## i Fold2: preprocessor 1/1, model 2/2 (predictions)
## i Fold3: preprocessor 1/1
## ✓ Fold3: preprocessor 1/1
## i Fold3: preprocessor 1/1, model 1/2
## ✓ Fold3: preprocessor 1/1, model 1/2
## i Fold3: preprocessor 1/1, model 1/2 (predictions)
## i Fold3: preprocessor 1/1, model 2/2
## ✓ Fold3: preprocessor 1/1, model 2/2
## i Fold3: preprocessor 1/1, model 2/2 (predictions)
## i Fold4: preprocessor 1/1
## ✓ Fold4: preprocessor 1/1
## i Fold4: preprocessor 1/1, model 1/2
## ✓ Fold4: preprocessor 1/1, model 1/2
## i Fold4: preprocessor 1/1, model 1/2 (predictions)
## i Fold4: preprocessor 1/1, model 2/2
## ✓ Fold4: preprocessor 1/1, model 2/2
## i Fold4: preprocessor 1/1, model 2/2 (predictions)
## i Fold5: preprocessor 1/1
## ✓ Fold5: preprocessor 1/1
## i Fold5: preprocessor 1/1, model 1/2
## ✓ Fold5: preprocessor 1/1, model 1/2
## i Fold5: preprocessor 1/1, model 1/2 (predictions)
## i Fold5: preprocessor 1/1, model 2/2
## ✓ Fold5: preprocessor 1/1, model 2/2
## i Fold5: preprocessor 1/1, model 2/2 (predictions)
## ✓ Finished tuning Model Specification.
## ℹ Model Parameters:
## # A tibble: 1 × 8
## penalty mixture .metric .estimator mean n std_err .config
## <dbl> <dbl> <chr> <chr> <dbl> <int> <dbl> <chr>
## 1 0.00000000747 0.187 rmse standard 97.5 5 11.6 Preprocessor1_Mo…
## ℹ Prediction Error Comparison:
## # A tibble: 5 × 3
## .model_id rmse .model_desc
## <chr> <dbl> <chr>
## 1 1 178. ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ XGBOOST ERRORS
## 2 2 159. PROPHET W/ XGBOOST ERRORS
## 3 3 189. XGBOOST
## 4 4 207. RANDOMFOREST
## 5 ensemble 74.5 ENSEMBLE (MODEL SPEC)
##
## ── Final Model ──────────────────────────────────────────────────
## ℹ Model Workflow:
## ══ Workflow [trained] ══════════════════════════════════════════════════════════
## Preprocessor: Recipe
## Model: linear_reg()
##
## ── Preprocessor ────────────────────────────────────────────────────────────────
## 0 Recipe Steps
##
## ── Model ───────────────────────────────────────────────────────────────────────
##
## Call: glmnet::glmnet(x = maybe_matrix(x), y = y, family = "gaussian", alpha = ~0.18737892099889)
##
## Df %Dev Lambda
## 1 0 0.00 1144.00
## 2 3 5.98 1043.00
## 3 4 13.02 950.10
## 4 4 19.75 865.70
## 5 4 26.06 788.80
## 6 4 31.93 718.70
## 7 4 37.36 654.90
## 8 4 42.36 596.70
## 9 4 46.93 543.70
## 10 4 51.09 495.40
## 11 4 54.85 451.40
## 12 4 58.23 411.30
## 13 4 61.25 374.70
## 14 4 63.95 341.40
## 15 4 66.34 311.10
## 16 4 68.46 283.50
## 17 4 70.32 258.30
## 18 4 71.95 235.30
## 19 4 73.38 214.40
## 20 4 74.63 195.40
## 21 4 75.72 178.00
## 22 4 76.67 162.20
## 23 4 77.49 147.80
## 24 4 78.21 134.70
## 25 4 78.83 122.70
## 26 4 79.38 111.80
## 27 4 79.86 101.90
## 28 4 80.28 92.83
## 29 4 80.65 84.58
## 30 4 80.98 77.07
## 31 4 81.28 70.22
## 32 4 81.56 63.98
## 33 4 81.81 58.30
## 34 4 82.04 53.12
## 35 4 82.26 48.40
## 36 4 82.47 44.10
## 37 4 82.67 40.18
## 38 3 82.84 36.61
## 39 3 82.92 33.36
## 40 3 82.99 30.40
## 41 3 83.05 27.70
## 42 3 83.11 25.24
## 43 3 83.16 22.99
## 44 3 83.20 20.95
## 45 3 83.24 19.09
## 46 3 83.28 17.39
##
## ...
## and 54 more lines.
##
## 7.225 sec elapsed
# Ensemble 3: XGBoost meta-learner stacked on the resample predictions,
# tuning mtry, trees and learn_rate over the default grid.
ensemble_fit_xg <- ensemble_model_spec(
  resamples_fitted,
  model_spec = set_engine(
    boost_tree(mtry = tune(), trees = tune(), learn_rate = tune()),
    "xgboost"
  ),
  control = control_grid(verbose = TRUE)
)
## ── Tuning Model Specification ───────────────────────────────────
## ℹ Performing 5-Fold Cross Validation.
## i Creating pre-processing data to finalize unknown parameter: mtry
## i Fold1: preprocessor 1/1
## ✓ Fold1: preprocessor 1/1
## i Fold1: preprocessor 1/1, model 1/6
## ✓ Fold1: preprocessor 1/1, model 1/6
## i Fold1: preprocessor 1/1, model 1/6 (predictions)
## i Fold1: preprocessor 1/1, model 2/6
## ✓ Fold1: preprocessor 1/1, model 2/6
## i Fold1: preprocessor 1/1, model 2/6 (predictions)
## i Fold1: preprocessor 1/1, model 3/6
## ✓ Fold1: preprocessor 1/1, model 3/6
## i Fold1: preprocessor 1/1, model 3/6 (predictions)
## i Fold1: preprocessor 1/1, model 4/6
## ✓ Fold1: preprocessor 1/1, model 4/6
## i Fold1: preprocessor 1/1, model 4/6 (predictions)
## i Fold1: preprocessor 1/1, model 5/6
## ✓ Fold1: preprocessor 1/1, model 5/6
## i Fold1: preprocessor 1/1, model 5/6 (predictions)
## i Fold1: preprocessor 1/1, model 6/6
## ✓ Fold1: preprocessor 1/1, model 6/6
## i Fold1: preprocessor 1/1, model 6/6 (predictions)
## i Fold2: preprocessor 1/1
## ✓ Fold2: preprocessor 1/1
## i Fold2: preprocessor 1/1, model 1/6
## ✓ Fold2: preprocessor 1/1, model 1/6
## i Fold2: preprocessor 1/1, model 1/6 (predictions)
## i Fold2: preprocessor 1/1, model 2/6
## ✓ Fold2: preprocessor 1/1, model 2/6
## i Fold2: preprocessor 1/1, model 2/6 (predictions)
## i Fold2: preprocessor 1/1, model 3/6
## ✓ Fold2: preprocessor 1/1, model 3/6
## i Fold2: preprocessor 1/1, model 3/6 (predictions)
## i Fold2: preprocessor 1/1, model 4/6
## ✓ Fold2: preprocessor 1/1, model 4/6
## i Fold2: preprocessor 1/1, model 4/6 (predictions)
## i Fold2: preprocessor 1/1, model 5/6
## ✓ Fold2: preprocessor 1/1, model 5/6
## i Fold2: preprocessor 1/1, model 5/6 (predictions)
## i Fold2: preprocessor 1/1, model 6/6
## ✓ Fold2: preprocessor 1/1, model 6/6
## i Fold2: preprocessor 1/1, model 6/6 (predictions)
## i Fold3: preprocessor 1/1
## ✓ Fold3: preprocessor 1/1
## i Fold3: preprocessor 1/1, model 1/6
## ✓ Fold3: preprocessor 1/1, model 1/6
## i Fold3: preprocessor 1/1, model 1/6 (predictions)
## i Fold3: preprocessor 1/1, model 2/6
## ✓ Fold3: preprocessor 1/1, model 2/6
## i Fold3: preprocessor 1/1, model 2/6 (predictions)
## i Fold3: preprocessor 1/1, model 3/6
## ✓ Fold3: preprocessor 1/1, model 3/6
## i Fold3: preprocessor 1/1, model 3/6 (predictions)
## i Fold3: preprocessor 1/1, model 4/6
## ✓ Fold3: preprocessor 1/1, model 4/6
## i Fold3: preprocessor 1/1, model 4/6 (predictions)
## i Fold3: preprocessor 1/1, model 5/6
## ✓ Fold3: preprocessor 1/1, model 5/6
## i Fold3: preprocessor 1/1, model 5/6 (predictions)
## i Fold3: preprocessor 1/1, model 6/6
## ✓ Fold3: preprocessor 1/1, model 6/6
## i Fold3: preprocessor 1/1, model 6/6 (predictions)
## i Fold4: preprocessor 1/1
## ✓ Fold4: preprocessor 1/1
## i Fold4: preprocessor 1/1, model 1/6
## ✓ Fold4: preprocessor 1/1, model 1/6
## i Fold4: preprocessor 1/1, model 1/6 (predictions)
## i Fold4: preprocessor 1/1, model 2/6
## ✓ Fold4: preprocessor 1/1, model 2/6
## i Fold4: preprocessor 1/1, model 2/6 (predictions)
## i Fold4: preprocessor 1/1, model 3/6
## ✓ Fold4: preprocessor 1/1, model 3/6
## i Fold4: preprocessor 1/1, model 3/6 (predictions)
## i Fold4: preprocessor 1/1, model 4/6
## ✓ Fold4: preprocessor 1/1, model 4/6
## i Fold4: preprocessor 1/1, model 4/6 (predictions)
## i Fold4: preprocessor 1/1, model 5/6
## ✓ Fold4: preprocessor 1/1, model 5/6
## i Fold4: preprocessor 1/1, model 5/6 (predictions)
## i Fold4: preprocessor 1/1, model 6/6
## ✓ Fold4: preprocessor 1/1, model 6/6
## i Fold4: preprocessor 1/1, model 6/6 (predictions)
## i Fold5: preprocessor 1/1
## ✓ Fold5: preprocessor 1/1
## i Fold5: preprocessor 1/1, model 1/6
## ✓ Fold5: preprocessor 1/1, model 1/6
## i Fold5: preprocessor 1/1, model 1/6 (predictions)
## i Fold5: preprocessor 1/1, model 2/6
## ✓ Fold5: preprocessor 1/1, model 2/6
## i Fold5: preprocessor 1/1, model 2/6 (predictions)
## i Fold5: preprocessor 1/1, model 3/6
## ✓ Fold5: preprocessor 1/1, model 3/6
## i Fold5: preprocessor 1/1, model 3/6 (predictions)
## i Fold5: preprocessor 1/1, model 4/6
## ✓ Fold5: preprocessor 1/1, model 4/6
## i Fold5: preprocessor 1/1, model 4/6 (predictions)
## i Fold5: preprocessor 1/1, model 5/6
## ✓ Fold5: preprocessor 1/1, model 5/6
## i Fold5: preprocessor 1/1, model 5/6 (predictions)
## i Fold5: preprocessor 1/1, model 6/6
## ✓ Fold5: preprocessor 1/1, model 6/6
## i Fold5: preprocessor 1/1, model 6/6 (predictions)
## ✓ Finished tuning Model Specification.
## ℹ Model Parameters:
## # A tibble: 1 × 9
## mtry trees learn_rate .metric .estimator mean n std_err .config
## <int> <int> <dbl> <chr> <chr> <dbl> <int> <dbl> <chr>
## 1 3 77 0.0384 rmse standard 130. 5 23.7 Preprocessor1_M…
## ℹ Prediction Error Comparison:
## # A tibble: 5 × 3
## .model_id rmse .model_desc
## <chr> <dbl> <chr>
## 1 1 178. ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ XGBOOST ERRORS
## 2 2 159. PROPHET W/ XGBOOST ERRORS
## 3 3 189. XGBOOST
## 4 4 207. RANDOMFOREST
## 5 ensemble 107. ENSEMBLE (MODEL SPEC)
##
## ── Final Model ──────────────────────────────────────────────────
## ℹ Model Workflow:
## ══ Workflow [trained] ══════════════════════════════════════════════════════════
## Preprocessor: Recipe
## Model: boost_tree()
##
## ── Preprocessor ────────────────────────────────────────────────────────────────
## 0 Recipe Steps
##
## ── Model ───────────────────────────────────────────────────────────────────────
## ##### xgb.Booster
## raw: 45.5 Kb
## call:
## xgboost::xgb.train(params = list(eta = 0.0384445574650874, max_depth = 6,
## gamma = 0, colsample_bytree = 1, colsample_bynode = 0.75,
## min_child_weight = 1, subsample = 1, objective = "reg:squarederror"),
## data = x$data, nrounds = 77L, watchlist = x$watchlist, verbose = 0,
## nthread = 1)
## params (as set within xgb.train):
## eta = "0.0384445574650874", max_depth = "6", gamma = "0", colsample_bytree = "1", colsample_bynode = "0.75", min_child_weight = "1", subsample = "1", objective = "reg:squarederror", nthread = "1", validate_parameters = "TRUE"
## xgb.attributes:
## niter
## callbacks:
## cb.evaluation.log()
## # of features: 4
## niter: 77
## nfeatures : 4
## evaluation_log:
## iter training_rmse
## 1 1382.9149
## 2 1333.2954
## ---
## 76 110.6869
## 77 107.4778
##
## 50.32 sec elapsed
# Gather the three ensemble fits (mean average, glmnet stack, xgboost
# stack) into a single modeltime table so they can be evaluated together.
ensemble_tbl <- modeltime_table(
  ensemble_fit_mean,
  ensemble_fit_lm,
  ensemble_fit_xg
)
# Score ensembles alongside the individual submodels on the hold-out
# test split, ranked by RMSE (ties broken by MAE, MAPE, then MASE).
ensemble_tbl %>%
  combine_modeltime_tables(submodels_tbl) %>%
  modeltime_accuracy(testing(splits)) %>%
  arrange(rmse, mae, mape, mase) %>%
  knitr::kable()
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
| .model_id | .model_desc | .type | mae | mape | mase | smape | rmse | rsq |
|---|---|---|---|---|---|---|---|---|
| 4 | ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ XGBOOST ERRORS | Test | 66.62842 | 3.755342 | 0.7740205 | 3.805410 | 75.78838 | 0.7921893 |
| 5 | PROPHET W/ XGBOOST ERRORS | Test | 118.03429 | 6.525348 | 1.3712011 | 6.833269 | 141.17847 | 0.0918866 |
| 2 | ENSEMBLE (GLMNET STACK): 4 MODELS | Test | 149.58076 | 8.324326 | 1.7376755 | 8.748728 | 172.10912 | 0.8142009 |
| 1 | ENSEMBLE (MEAN): 4 MODELS | Test | 153.32419 | 8.522200 | 1.7811629 | 9.012713 | 175.05839 | 0.5887220 |
| 6 | XGBOOST | Test | 168.01570 | 9.335913 | 1.9518337 | 9.930132 | 192.05816 | NA |
| 3 | ENSEMBLE (XGBOOST STACK): 4 MODELS | Test | 205.39900 | 11.477075 | 2.3861145 | 12.310987 | 225.49015 | NA |
| 7 | RANDOMFOREST | Test | 309.22893 | 17.454369 | 3.5923038 | 19.225599 | 320.53272 | 0.3466154 |
# Calibrate the ensembles on the test split and plot their test-set
# forecasts against the full prepared data with 95% intervals.
ensemble_tbl %>%
  modeltime_calibrate(testing(splits)) %>%
  modeltime_forecast(
    new_data      = testing(splits),
    actual_data   = data_prepared_tbl,
    keep_data     = TRUE,  # was `T`: always spell out TRUE (T is reassignable)
    # NOTE(review): the warning below shows no 'id' column was detected, so
    # conf_by_id has no effect here and a global interval is returned.
    conf_by_id    = TRUE,
    conf_interval = 0.95
  ) %>%
  plot_modeltime_forecast(
    .facet_ncol = 2
  )
## Warning: The 'id' column in calibration data was not detected. Global Confidence
## Interval is being returned.
# Same test-set visualization, but with the submodels included next to
# the ensembles for a side-by-side comparison.
ensemble_tbl %>%
  combine_modeltime_tables(submodels_tbl) %>%
  modeltime_calibrate(testing(splits)) %>%
  modeltime_forecast(
    new_data    = testing(splits),
    actual_data = data_prepared_tbl,
    keep_data   = TRUE  # was `T`: always spell out TRUE (T is reassignable)
  ) %>%
  plot_modeltime_forecast(
    .facet_ncol = 2
  )
# Keep a combined table (ensembles + submodels) around for later use.
ensemble_tbl_all_model <- ensemble_tbl %>%
  combine_modeltime_tables(submodels_tbl)

# Refit the ensembles on the full prepared data before projecting forward.
# NOTE(review): per the warning below, without a `resamples` argument only
# the submodels are refit — the stacking meta-learners keep their weights.
ensemble_refit_tbl <- ensemble_tbl %>%
  modeltime_refit(data_prepared_tbl)
## frequency = 12 observations per 1 year
## Warning in mdl_time_refit.mdl_time_ensemble_model_spec(...): 'resamples' not
## provided during refitting. Submodels will be refit, but the meta-learner will
## *not* be refit. You can provide 'resamples' via `modeltime_refit(object, data,
## resamples, control)`. Proceeding by refitting the submodels only.
## frequency = 12 observations per 1 year
## Warning in mdl_time_refit.mdl_time_ensemble_model_spec(...): 'resamples' not
## provided during refitting. Submodels will be refit, but the meta-learner will
## *not* be refit. You can provide 'resamples' via `modeltime_refit(object, data,
## resamples, control)`. Proceeding by refitting the submodels only.
## frequency = 12 observations per 1 year
# Forecast the future horizon with the refitted ensembles.
# NOTE(review): the warning below shows no confidence intervals are drawn
# because the refit table was not re-calibrated; run modeltime_calibrate()
# before forecasting to restore them.
ensemble_refit_tbl %>%
  modeltime_forecast(
    new_data    = future_tbl,
    actual_data = data_prepared_tbl,
    keep_data   = TRUE  # was `T`: always spell out TRUE (T is reassignable)
  ) %>%
  plot_modeltime_forecast(
    .facet_ncol = 2
  )
## Warning: Expecting the following names to be in the data frame: .conf_hi, .conf_lo.
## Proceeding with '.conf_interval_show = FALSE' to visualize the forecast without confidence intervals.
## Alternatively, try using `modeltime_calibrate()` before forecasting to add confidence intervals.
# Commented-out exploratory code.
# NOTE(review): "otlier" looks like a typo for "outlier" — confirm the
# actual variable names before uncommenting.
# otlier_crc
# plot(otlier_crc)
# plot(otlier_usd)
# otlier_usd
# Decomposition